/* gcc/builtins.c */

/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

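/* Illustrative note (not part of the original source): given the checks
   above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add") and
   is_builtin_name ("__atomic_load_n") all return true, while
   is_builtin_name ("memcpy") returns false.  */
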
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

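/* Illustrative note (not part of the original source): the invariant
   established above is ptr & (align - 1) == bitpos, both in bits.  For
   example, a 4-byte int member at byte offset 2 of a struct whose start
   is 8-byte aligned would yield *alignp == 64 and *bitposp == 16, which
   get_object_alignment then reduces to least_bit_hwi (16) == 16 bits,
   i.e. 2-byte alignment for the member itself.  */
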
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

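/* Illustrative note (not part of the original source): for the bytes of
   "ab\0cd" with ELTSIZE == 1 and MAXELTS == 6, string_length returns 2,
   the count of leading non-zero elements.  A 4-byte wide string would be
   scanned with ELTSIZE == 4, comparing each element against four zero
   bytes.  The scan never reads past PTR + MAXELTS * ELTSIZE.  */
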
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}

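/* Illustrative note (not part of the original source): for a constant
   argument such as "foobar" with a zero offset, c_strlen returns
   ssize_int (6).  For "foo\0bar" with a non-constant byte offset it
   returns NULL_TREE, since the distance to the next NUL depends on where
   the search starts; and for an unterminated array it returns NULL_TREE
   but fills in the DECL, OFF and MINLEN fields of DATA for the caller.  */
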
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

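/* Illustrative note (not part of the original source): on a little-endian
   target, c_readstr ("abcd", SImode) places byte I at bit position 8 * I,
   so the returned constant equals 0x64636261 with 'a' in the lowest byte,
   matching what a target word load from the string would produce.  Once a
   NUL is seen, CH stays zero and the remaining bytes are left as zero.  */
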
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

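/* Illustrative note (not part of the original source): the value is first
   truncated to the target char type, so with 8-bit target chars the
   INTEGER_CST 65 stores 'A' through P and returns 0.  A nonzero return
   happens only when the truncated target character does not fit in a host
   char, e.g. a 16-bit target char holding 0x141 on a host with 8-bit
   chars.  */
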
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

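/* Illustrative note (not part of the original source): under the
   assumption stated above, an SSA_NAME or a non-addressable PARM_DECL or
   local VAR_DECL is returned unchanged, since re-evaluating it is safe;
   anything else, e.g. a static variable or an indirection, is handed to
   save_expr so it is evaluated only once.  */
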
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

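/* Illustrative note (not part of the original source): a typical use is
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) for a memcpy-like call.  The trailing VOID_TYPE demands that
   no further arguments remain, whereas a trailing 0 would accept any
   extra arguments, as with the prefetch expansion below.  */
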
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

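/* Illustrative note (not part of the original source): for a source-level
   call such as __builtin_prefetch (p, 1, 3), argument 0 is expanded to an
   address, the read/write flag is checked to be 0 or 1 and the locality
   to be in 0..3, and the target's prefetch pattern is emitted when one
   exists; otherwise only the side effects of the address computation
   survive.  */
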
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

1665 /* __builtin_apply_args returns block of memory allocated on
1666 the stack into which is stored the arg pointer, structure
1667 value address, static chain, and all the registers that might
1668 possibly be used in performing a function call. The code is
1669 moved to the start of the function so the incoming values are
1670 saved. */
1672 static rtx
1673 expand_builtin_apply_args (void)
1675 /* Don't do __builtin_apply_args more than once in a function.
1676 Save the result of the first call and reuse it. */
1677 if (apply_args_value != 0)
1678 return apply_args_value;
1680 /* When this function is called, it means that registers must be
1681 saved on entry to this function. So we migrate the
1682 call to the first insn of this function. */
1683 rtx temp;
1685 start_sequence ();
1686 temp = expand_builtin_apply_args_1 ();
1687 rtx_insn *seq = get_insns ();
1688 end_sequence ();
1690 apply_args_value = temp;
1692 /* Put the insns after the NOTE that starts the function.
1693 If this is inside a start_sequence, make the outer-level insn
1694 chain current, so the code is placed at the start of the
1695 function. If internal_arg_pointer is a non-virtual pseudo,
1696 it needs to be placed after the insn that initializes
1697 that pseudo. */
1698 push_topmost_sequence ();
1699 if (REG_P (crtl->args.internal_arg_pointer)
1700 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1701 emit_insn_before (seq, parm_birth_insn);
1702 else
1703 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1704 pop_topmost_sequence ();
1705 return temp;
1709 /* Perform an untyped call and save the state required to perform an
1710 untyped return of whatever value was returned by the given function. */
1712 static rtx
1713 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1715 int size, align, regno;
1716 fixed_size_mode mode;
1717 rtx incoming_args, result, reg, dest, src;
1718 rtx_call_insn *call_insn;
1719 rtx old_stack_level = 0;
1720 rtx call_fusage = 0;
1721 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1723 arguments = convert_memory_address (Pmode, arguments);
1725 /* Create a block where the return registers can be saved. */
1726 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1728 /* Fetch the arg pointer from the ARGUMENTS block. */
1729 incoming_args = gen_reg_rtx (Pmode);
1730 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1731 if (!STACK_GROWS_DOWNWARD)
1732 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1733 incoming_args, 0, OPTAB_LIB_WIDEN);
1735 /* Push a new argument block and copy the arguments. Do not allow
1736 the (potential) memcpy call below to interfere with our stack
1737 manipulations. */
1738 do_pending_stack_adjust ();
1739 NO_DEFER_POP;
1741 /* Save the stack, using the nonlocal mechanism if available.  */
1742 if (targetm.have_save_stack_nonlocal ())
1743 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1744 else
1745 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1747 /* Allocate a block of memory onto the stack and copy the memory
1748 arguments to the outgoing arguments address.  We can pass TRUE
1749 as the last argument because we just saved the stack pointer
1750 and will restore it right after the call. */
1751 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1753 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1754 may have already set current_function_calls_alloca to true.
1755 current_function_calls_alloca won't be set if argsize is zero,
1756 so we have to guarantee need_drap is true here. */
1757 if (SUPPORTS_STACK_ALIGNMENT)
1758 crtl->need_drap = true;
1760 dest = virtual_outgoing_args_rtx;
1761 if (!STACK_GROWS_DOWNWARD)
1763 if (CONST_INT_P (argsize))
1764 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1765 else
1766 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1768 dest = gen_rtx_MEM (BLKmode, dest);
1769 set_mem_align (dest, PARM_BOUNDARY);
1770 src = gen_rtx_MEM (BLKmode, incoming_args);
1771 set_mem_align (src, PARM_BOUNDARY);
1772 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1774 /* Refer to the argument block. */
1775 apply_args_size ();
1776 arguments = gen_rtx_MEM (BLKmode, arguments);
1777 set_mem_align (arguments, PARM_BOUNDARY);
1779 /* Walk past the arg-pointer and structure value address. */
1780 size = GET_MODE_SIZE (Pmode);
1781 if (struct_value)
1782 size += GET_MODE_SIZE (Pmode);
1784 /* Restore each of the registers previously saved. Make USE insns
1785 for each of these registers for use in making the call. */
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_args_mode[regno]) != VOIDmode)
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1792 reg = gen_rtx_REG (mode, regno);
1793 emit_move_insn (reg, adjust_address (arguments, mode, size));
1794 use_reg (&call_fusage, reg);
1795 size += GET_MODE_SIZE (mode);
1798 /* Restore the structure value address unless this is passed as an
1799 "invisible" first argument. */
1800 size = GET_MODE_SIZE (Pmode);
1801 if (struct_value)
1803 rtx value = gen_reg_rtx (Pmode);
1804 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1805 emit_move_insn (struct_value, value);
1806 if (REG_P (struct_value))
1807 use_reg (&call_fusage, struct_value);
1808 size += GET_MODE_SIZE (Pmode);
1811 /* All arguments and registers used for the call are set up by now! */
1812 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1814 /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
1815 work is needed, and we don't want to load it into a register as an
1816 optimization, because prepare_call_address already did so if it should be done.  */
1817 if (GET_CODE (function) != SYMBOL_REF)
1818 function = memory_address (FUNCTION_MODE, function);
1820 /* Generate the actual call instruction and save the return value. */
1821 if (targetm.have_untyped_call ())
1823 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1824 emit_call_insn (targetm.gen_untyped_call (mem, result,
1825 result_vector (1, result)));
1827 else if (targetm.have_call_value ())
1829 rtx valreg = 0;
1831 /* Locate the unique return register. It is not possible to
1832 express a call that sets more than one return register using
1833 call_value; use untyped_call for that. In fact, untyped_call
1834 only needs to save the return registers in the given block. */
1835 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1836 if ((mode = apply_result_mode[regno]) != VOIDmode)
1838 gcc_assert (!valreg); /* have_untyped_call required. */
1840 valreg = gen_rtx_REG (mode, regno);
1843 emit_insn (targetm.gen_call_value (valreg,
1844 gen_rtx_MEM (FUNCTION_MODE, function),
1845 const0_rtx, NULL_RTX, const0_rtx));
1847 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1849 else
1850 gcc_unreachable ();
1852 /* Find the CALL insn we just emitted, and attach the register usage
1853 information. */
1854 call_insn = last_call_insn ();
1855 add_function_usage_to (call_insn, call_fusage);
1857 /* Restore the stack. */
1858 if (targetm.have_save_stack_nonlocal ())
1859 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1860 else
1861 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1862 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1864 OK_DEFER_POP;
1866 /* Return the address of the result block. */
1867 result = copy_addr_to_reg (XEXP (result, 0));
1868 return convert_memory_address (ptr_mode, result);
1871 /* Perform an untyped return. */
1873 static void
1874 expand_builtin_return (rtx result)
1876 int size, align, regno;
1877 fixed_size_mode mode;
1878 rtx reg;
1879 rtx_insn *call_fusage = 0;
1881 result = convert_memory_address (Pmode, result);
1883 apply_result_size ();
1884 result = gen_rtx_MEM (BLKmode, result);
1886 if (targetm.have_untyped_return ())
1888 rtx vector = result_vector (0, result);
1889 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1890 emit_barrier ();
1891 return;
1894 /* Restore the return value and note that each value is used. */
1895 size = 0;
1896 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1897 if ((mode = apply_result_mode[regno]) != VOIDmode)
1899 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1900 if (size % align != 0)
1901 size = CEIL (size, align) * align;
1902 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1903 emit_move_insn (reg, adjust_address (result, mode, size));
1905 push_to_sequence (call_fusage);
1906 emit_use (reg);
1907 call_fusage = get_insns ();
1908 end_sequence ();
1909 size += GET_MODE_SIZE (mode);
1912 /* Put the USE insns before the return. */
1913 emit_insn (call_fusage);
1915 /* Return whatever values were restored by jumping directly to the end
1916 of the function. */
1917 expand_naked_return ();
1920 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1922 static enum type_class
1923 type_to_class (tree type)
1925 switch (TREE_CODE (type))
1927 case VOID_TYPE: return void_type_class;
1928 case INTEGER_TYPE: return integer_type_class;
1929 case ENUMERAL_TYPE: return enumeral_type_class;
1930 case BOOLEAN_TYPE: return boolean_type_class;
1931 case POINTER_TYPE: return pointer_type_class;
1932 case REFERENCE_TYPE: return reference_type_class;
1933 case OFFSET_TYPE: return offset_type_class;
1934 case REAL_TYPE: return real_type_class;
1935 case COMPLEX_TYPE: return complex_type_class;
1936 case FUNCTION_TYPE: return function_type_class;
1937 case METHOD_TYPE: return method_type_class;
1938 case RECORD_TYPE: return record_type_class;
1939 case UNION_TYPE:
1940 case QUAL_UNION_TYPE: return union_type_class;
1941 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1942 ? string_type_class : array_type_class);
1943 case LANG_TYPE: return lang_type_class;
1944 default: return no_type_class;
1948 /* Expand a call EXP to __builtin_classify_type. */
1950 static rtx
1951 expand_builtin_classify_type (tree exp)
1953 if (call_expr_nargs (exp))
1954 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1955 return GEN_INT (no_type_class);
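/* For example (illustrative): __builtin_classify_type (1.0) evaluates
   to real_type_class, while __builtin_classify_type ("") yields
   pointer_type_class, the string literal having undergone the usual
   array-to-pointer decay.  */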
1958 /* This helper macro, meant to be used in mathfn_built_in below, determines
1959 which among a set of builtin math functions is appropriate for a given type
1960 mode. The `F' (float) and `L' (long double) are automatically generated
1961 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1962 types, there are additional types that are considered with 'F32', 'F64',
1963 'F128', etc. suffixes. */
1964 #define CASE_MATHFN(MATHFN) \
1965 CASE_CFN_##MATHFN: \
1966 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1967 fcodel = BUILT_IN_##MATHFN##L ; break;
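/* For example, CASE_MATHFN (ACOS) is expected to expand, via the
   generated CASE_CFN_ACOS, to the case labels for CFN_ACOS and
   CFN_BUILT_IN_ACOS{,F,L}, recording BUILT_IN_ACOS, BUILT_IN_ACOSF and
   BUILT_IN_ACOSL in fcode, fcodef and fcodel.  */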
1968 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1969 types. */
1970 #define CASE_MATHFN_FLOATN(MATHFN) \
1971 CASE_CFN_##MATHFN: \
1972 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1973 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1974 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1975 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1976 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1977 break;
1978 /* Similar to above, but appends _R after any F/L suffix. */
1979 #define CASE_MATHFN_REENT(MATHFN) \
1980 case CFN_BUILT_IN_##MATHFN##_R: \
1981 case CFN_BUILT_IN_##MATHFN##F_R: \
1982 case CFN_BUILT_IN_##MATHFN##L_R: \
1983 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1984 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1986 /* Return a function equivalent to FN but operating on floating-point
1987 values of type TYPE, or END_BUILTINS if no such function exists.
1988 This is purely an operation on function codes; it does not guarantee
1989 that the target actually has an implementation of the function. */
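/* E.g. mathfn_built_in_2 (float_type_node, CFN_SQRT) should yield
   BUILT_IN_SQRTF, while a type with no matching suffix (a vector type,
   say) yields END_BUILTINS.  */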
1991 static built_in_function
1992 mathfn_built_in_2 (tree type, combined_fn fn)
1994 tree mtype;
1995 built_in_function fcode, fcodef, fcodel;
1996 built_in_function fcodef16 = END_BUILTINS;
1997 built_in_function fcodef32 = END_BUILTINS;
1998 built_in_function fcodef64 = END_BUILTINS;
1999 built_in_function fcodef128 = END_BUILTINS;
2000 built_in_function fcodef32x = END_BUILTINS;
2001 built_in_function fcodef64x = END_BUILTINS;
2002 built_in_function fcodef128x = END_BUILTINS;
2004 switch (fn)
2006 CASE_MATHFN (ACOS)
2007 CASE_MATHFN (ACOSH)
2008 CASE_MATHFN (ASIN)
2009 CASE_MATHFN (ASINH)
2010 CASE_MATHFN (ATAN)
2011 CASE_MATHFN (ATAN2)
2012 CASE_MATHFN (ATANH)
2013 CASE_MATHFN (CBRT)
2014 CASE_MATHFN_FLOATN (CEIL)
2015 CASE_MATHFN (CEXPI)
2016 CASE_MATHFN_FLOATN (COPYSIGN)
2017 CASE_MATHFN (COS)
2018 CASE_MATHFN (COSH)
2019 CASE_MATHFN (DREM)
2020 CASE_MATHFN (ERF)
2021 CASE_MATHFN (ERFC)
2022 CASE_MATHFN (EXP)
2023 CASE_MATHFN (EXP10)
2024 CASE_MATHFN (EXP2)
2025 CASE_MATHFN (EXPM1)
2026 CASE_MATHFN (FABS)
2027 CASE_MATHFN (FDIM)
2028 CASE_MATHFN_FLOATN (FLOOR)
2029 CASE_MATHFN_FLOATN (FMA)
2030 CASE_MATHFN_FLOATN (FMAX)
2031 CASE_MATHFN_FLOATN (FMIN)
2032 CASE_MATHFN (FMOD)
2033 CASE_MATHFN (FREXP)
2034 CASE_MATHFN (GAMMA)
2035 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2036 CASE_MATHFN (HUGE_VAL)
2037 CASE_MATHFN (HYPOT)
2038 CASE_MATHFN (ILOGB)
2039 CASE_MATHFN (ICEIL)
2040 CASE_MATHFN (IFLOOR)
2041 CASE_MATHFN (INF)
2042 CASE_MATHFN (IRINT)
2043 CASE_MATHFN (IROUND)
2044 CASE_MATHFN (ISINF)
2045 CASE_MATHFN (J0)
2046 CASE_MATHFN (J1)
2047 CASE_MATHFN (JN)
2048 CASE_MATHFN (LCEIL)
2049 CASE_MATHFN (LDEXP)
2050 CASE_MATHFN (LFLOOR)
2051 CASE_MATHFN (LGAMMA)
2052 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2053 CASE_MATHFN (LLCEIL)
2054 CASE_MATHFN (LLFLOOR)
2055 CASE_MATHFN (LLRINT)
2056 CASE_MATHFN (LLROUND)
2057 CASE_MATHFN (LOG)
2058 CASE_MATHFN (LOG10)
2059 CASE_MATHFN (LOG1P)
2060 CASE_MATHFN (LOG2)
2061 CASE_MATHFN (LOGB)
2062 CASE_MATHFN (LRINT)
2063 CASE_MATHFN (LROUND)
2064 CASE_MATHFN (MODF)
2065 CASE_MATHFN (NAN)
2066 CASE_MATHFN (NANS)
2067 CASE_MATHFN_FLOATN (NEARBYINT)
2068 CASE_MATHFN (NEXTAFTER)
2069 CASE_MATHFN (NEXTTOWARD)
2070 CASE_MATHFN (POW)
2071 CASE_MATHFN (POWI)
2072 CASE_MATHFN (POW10)
2073 CASE_MATHFN (REMAINDER)
2074 CASE_MATHFN (REMQUO)
2075 CASE_MATHFN_FLOATN (RINT)
2076 CASE_MATHFN_FLOATN (ROUND)
2077 CASE_MATHFN (SCALB)
2078 CASE_MATHFN (SCALBLN)
2079 CASE_MATHFN (SCALBN)
2080 CASE_MATHFN (SIGNBIT)
2081 CASE_MATHFN (SIGNIFICAND)
2082 CASE_MATHFN (SIN)
2083 CASE_MATHFN (SINCOS)
2084 CASE_MATHFN (SINH)
2085 CASE_MATHFN_FLOATN (SQRT)
2086 CASE_MATHFN (TAN)
2087 CASE_MATHFN (TANH)
2088 CASE_MATHFN (TGAMMA)
2089 CASE_MATHFN_FLOATN (TRUNC)
2090 CASE_MATHFN (Y0)
2091 CASE_MATHFN (Y1)
2092 CASE_MATHFN (YN)
2094 default:
2095 return END_BUILTINS;
2098 mtype = TYPE_MAIN_VARIANT (type);
2099 if (mtype == double_type_node)
2100 return fcode;
2101 else if (mtype == float_type_node)
2102 return fcodef;
2103 else if (mtype == long_double_type_node)
2104 return fcodel;
2105 else if (mtype == float16_type_node)
2106 return fcodef16;
2107 else if (mtype == float32_type_node)
2108 return fcodef32;
2109 else if (mtype == float64_type_node)
2110 return fcodef64;
2111 else if (mtype == float128_type_node)
2112 return fcodef128;
2113 else if (mtype == float32x_type_node)
2114 return fcodef32x;
2115 else if (mtype == float64x_type_node)
2116 return fcodef64x;
2117 else if (mtype == float128x_type_node)
2118 return fcodef128x;
2119 else
2120 return END_BUILTINS;
2123 /* Return the math function equivalent to FN but operating directly on TYPE,
2124 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2125 otherwise use the explicit declaration. If we can't do the conversion,
2126 return null. */
2128 static tree
2129 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2131 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2132 if (fcode2 == END_BUILTINS)
2133 return NULL_TREE;
2135 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2136 return NULL_TREE;
2138 return builtin_decl_explicit (fcode2);
2141 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2143 tree
2144 mathfn_built_in (tree type, combined_fn fn)
2146 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2149 /* Like mathfn_built_in_1, but take a built_in_function and
2150 always use the implicit builtin declarations.  */
2152 tree
2153 mathfn_built_in (tree type, enum built_in_function fn)
2155 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
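/* E.g. mathfn_built_in (long_double_type_node, BUILT_IN_SIN) returns
   the decl for sinl, or NULL_TREE when the implicit declaration is not
   available.  */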
2158 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2159 return its code, otherwise return IFN_LAST. Note that this function
2160 only tests whether the function is defined in internal-fn.def, not whether
2161 it is actually available on the target. */
2163 internal_fn
2164 associated_internal_fn (tree fndecl)
2166 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2167 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2171 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2172 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2173 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2174 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2175 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2176 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2177 #include "internal-fn.def"
2179 CASE_FLT_FN (BUILT_IN_POW10):
2180 return IFN_EXP10;
2182 CASE_FLT_FN (BUILT_IN_DREM):
2183 return IFN_REMAINDER;
2185 CASE_FLT_FN (BUILT_IN_SCALBN):
2186 CASE_FLT_FN (BUILT_IN_SCALBLN):
2187 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2188 return IFN_LDEXP;
2189 return IFN_LAST;
2191 default:
2192 return IFN_LAST;
2196 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2197 on the current target by a call to an internal function, return the
2198 code of that internal function, otherwise return IFN_LAST. The caller
2199 is responsible for ensuring that any side-effects of the built-in
2200 call are dealt with correctly. E.g. if CALL sets errno, the caller
2201 must decide that the errno result isn't needed or make it available
2202 in some other way. */
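/* For instance, a call to sqrt may be replaced by IFN_SQRT when the
   target implements the corresponding optab for the operand's mode;
   the errno caveat above is then the caller's responsibility (e.g.
   discharged by -fno-math-errno or a provably non-negative argument).  */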
2204 internal_fn
2205 replacement_internal_fn (gcall *call)
2207 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2209 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2210 if (ifn != IFN_LAST)
2212 tree_pair types = direct_internal_fn_types (ifn, call);
2213 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2214 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2215 return ifn;
2218 return IFN_LAST;
2221 /* Expand a call to the builtin ternary math functions (fma).
2222 Return NULL_RTX if a normal call should be emitted rather than expanding the
2223 function in-line. EXP is the expression that is a call to the builtin
2224 function; if convenient, the result should be placed in TARGET.
2225 SUBTARGET may be used as the target for computing one of EXP's
2226 operands. */
2228 static rtx
2229 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 optab builtin_optab;
2232 rtx op0, op1, op2, result;
2233 rtx_insn *insns;
2234 tree fndecl = get_callee_fndecl (exp);
2235 tree arg0, arg1, arg2;
2236 machine_mode mode;
2238 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2239 return NULL_RTX;
2241 arg0 = CALL_EXPR_ARG (exp, 0);
2242 arg1 = CALL_EXPR_ARG (exp, 1);
2243 arg2 = CALL_EXPR_ARG (exp, 2);
2245 switch (DECL_FUNCTION_CODE (fndecl))
2247 CASE_FLT_FN (BUILT_IN_FMA):
2248 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2249 builtin_optab = fma_optab; break;
2250 default:
2251 gcc_unreachable ();
2254 /* Make a suitable register to place result in. */
2255 mode = TYPE_MODE (TREE_TYPE (exp));
2257 /* Before working hard, check whether the instruction is available. */
2258 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2259 return NULL_RTX;
2261 result = gen_reg_rtx (mode);
2263 /* Always stabilize the argument list. */
2264 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2265 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2266 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2268 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2269 op1 = expand_normal (arg1);
2270 op2 = expand_normal (arg2);
2272 start_sequence ();
2274 /* Compute into RESULT.
2275 Set RESULT to wherever the result comes back. */
2276 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2277 result, 0);
2279 /* If we were unable to expand via the builtin, stop the sequence
2280 (without outputting the insns) and emit a call to the library function
2281 with the stabilized argument list. */
2282 if (result == 0)
2284 end_sequence ();
2285 return expand_call (exp, target, target == const0_rtx);
2288 /* Output the entire sequence. */
2289 insns = get_insns ();
2290 end_sequence ();
2291 emit_insn (insns);
2293 return result;
2296 /* Expand a call to the builtin sin and cos math functions.
2297 Return NULL_RTX if a normal call should be emitted rather than expanding the
2298 function in-line. EXP is the expression that is a call to the builtin
2299 function; if convenient, the result should be placed in TARGET.
2300 SUBTARGET may be used as the target for computing one of EXP's
2301 operands. */
2303 static rtx
2304 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2306 optab builtin_optab;
2307 rtx op0;
2308 rtx_insn *insns;
2309 tree fndecl = get_callee_fndecl (exp);
2310 machine_mode mode;
2311 tree arg;
2313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2316 arg = CALL_EXPR_ARG (exp, 0);
2318 switch (DECL_FUNCTION_CODE (fndecl))
2320 CASE_FLT_FN (BUILT_IN_SIN):
2321 CASE_FLT_FN (BUILT_IN_COS):
2322 builtin_optab = sincos_optab; break;
2323 default:
2324 gcc_unreachable ();
2327 /* Make a suitable register to place result in. */
2328 mode = TYPE_MODE (TREE_TYPE (exp));
2330 /* Check whether the sincos insn is available; if not, fall back
2331 to the sin or cos insn.  */
2332 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2333 switch (DECL_FUNCTION_CODE (fndecl))
2335 CASE_FLT_FN (BUILT_IN_SIN):
2336 builtin_optab = sin_optab; break;
2337 CASE_FLT_FN (BUILT_IN_COS):
2338 builtin_optab = cos_optab; break;
2339 default:
2340 gcc_unreachable ();
2343 /* Before working hard, check whether the instruction is available. */
2344 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2346 rtx result = gen_reg_rtx (mode);
2348 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2349 need to expand the argument again. This way, we will not perform
2350 side-effects more than once.  */
2351 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2353 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2355 start_sequence ();
2357 /* Compute into RESULT.
2358 Set RESULT to wherever the result comes back. */
2359 if (builtin_optab == sincos_optab)
2361 int ok;
2363 switch (DECL_FUNCTION_CODE (fndecl))
2365 CASE_FLT_FN (BUILT_IN_SIN):
2366 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2367 break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2370 break;
2371 default:
2372 gcc_unreachable ();
2374 gcc_assert (ok);
2376 else
2377 result = expand_unop (mode, builtin_optab, op0, result, 0);
2379 if (result != 0)
2381 /* Output the entire sequence. */
2382 insns = get_insns ();
2383 end_sequence ();
2384 emit_insn (insns);
2385 return result;
2388 /* If we were unable to expand via the builtin, stop the sequence
2389 (without outputting the insns) and emit a call to the library function
2390 with the stabilized argument list. */
2391 end_sequence ();
2394 return expand_call (exp, target, target == const0_rtx);
2397 /* Given an interclass math builtin decl FNDECL and its argument ARG
2398 return an RTL instruction code that implements the functionality.
2399 If that isn't possible or available return CODE_FOR_nothing. */
2401 static enum insn_code
2402 interclass_mathfn_icode (tree arg, tree fndecl)
2404 bool errno_set = false;
2405 optab builtin_optab = unknown_optab;
2406 machine_mode mode;
2408 switch (DECL_FUNCTION_CODE (fndecl))
2410 CASE_FLT_FN (BUILT_IN_ILOGB):
2411 errno_set = true; builtin_optab = ilogb_optab; break;
2412 CASE_FLT_FN (BUILT_IN_ISINF):
2413 builtin_optab = isinf_optab; break;
2414 case BUILT_IN_ISNORMAL:
2415 case BUILT_IN_ISFINITE:
2416 CASE_FLT_FN (BUILT_IN_FINITE):
2417 case BUILT_IN_FINITED32:
2418 case BUILT_IN_FINITED64:
2419 case BUILT_IN_FINITED128:
2420 case BUILT_IN_ISINFD32:
2421 case BUILT_IN_ISINFD64:
2422 case BUILT_IN_ISINFD128:
2423 /* These builtins have no optabs (yet). */
2424 break;
2425 default:
2426 gcc_unreachable ();
2429 /* There's no easy way to detect the case where we need to set EDOM.  */
2430 if (flag_errno_math && errno_set)
2431 return CODE_FOR_nothing;
2433 /* Optab mode depends on the mode of the input argument. */
2434 mode = TYPE_MODE (TREE_TYPE (arg));
2436 if (builtin_optab)
2437 return optab_handler (builtin_optab, mode);
2438 return CODE_FOR_nothing;
2441 /* Expand a call to one of the builtin math functions that operate on
2442 a floating-point argument and output an integer result (ilogb, isinf,
2443 isnan, etc.).
2444 Return 0 if a normal call should be emitted rather than expanding the
2445 function in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2448 static rtx
2449 expand_builtin_interclass_mathfn (tree exp, rtx target)
2451 enum insn_code icode = CODE_FOR_nothing;
2452 rtx op0;
2453 tree fndecl = get_callee_fndecl (exp);
2454 machine_mode mode;
2455 tree arg;
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 return NULL_RTX;
2460 arg = CALL_EXPR_ARG (exp, 0);
2461 icode = interclass_mathfn_icode (arg, fndecl);
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2464 if (icode != CODE_FOR_nothing)
2466 struct expand_operand ops[1];
2467 rtx_insn *last = get_last_insn ();
2468 tree orig_arg = arg;
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once.  */
2473 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2475 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2477 if (mode != GET_MODE (op0))
2478 op0 = convert_to_mode (mode, op0, 0);
2480 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2481 if (maybe_legitimize_operands (icode, 0, 1, ops)
2482 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2483 return ops[0].value;
2485 delete_insns_since (last);
2486 CALL_EXPR_ARG (exp, 0) = orig_arg;
2489 return NULL_RTX;
2492 /* Expand a call to the builtin sincos math function.
2493 Return NULL_RTX if a normal call should be emitted rather than expanding the
2494 function in-line. EXP is the expression that is a call to the builtin
2495 function. */
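/* An illustrative source form (user code) is sincos (x, &s, &c), which
   reaches this expander as __builtin_sincos and is expanded via
   sincos_optab when the target provides it.  */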
2497 static rtx
2498 expand_builtin_sincos (tree exp)
2500 rtx op0, op1, op2, target1, target2;
2501 machine_mode mode;
2502 tree arg, sinp, cosp;
2503 int result;
2504 location_t loc = EXPR_LOCATION (exp);
2505 tree alias_type, alias_off;
2507 if (!validate_arglist (exp, REAL_TYPE,
2508 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2509 return NULL_RTX;
2511 arg = CALL_EXPR_ARG (exp, 0);
2512 sinp = CALL_EXPR_ARG (exp, 1);
2513 cosp = CALL_EXPR_ARG (exp, 2);
2515 /* Make a suitable register to place result in. */
2516 mode = TYPE_MODE (TREE_TYPE (arg));
2518 /* Check if sincos insn is available, otherwise emit the call. */
2519 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2520 return NULL_RTX;
2522 target1 = gen_reg_rtx (mode);
2523 target2 = gen_reg_rtx (mode);
2525 op0 = expand_normal (arg);
2526 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2527 alias_off = build_int_cst (alias_type, 0);
2528 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2529 sinp, alias_off));
2530 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2531 cosp, alias_off));
2533 /* Compute into target1 and target2.
2534 Set TARGET to wherever the result comes back. */
2535 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2536 gcc_assert (result);
2538 /* Move target1 and target2 to the memory locations indicated
2539 by op1 and op2. */
2540 emit_move_insn (op1, target1);
2541 emit_move_insn (op2, target2);
2543 return const0_rtx;
2546 /* Expand a call to the internal cexpi builtin to the sincos math function.
2547 EXP is the expression that is a call to the builtin function; if convenient,
2548 the result should be placed in TARGET. */
2550 static rtx
2551 expand_builtin_cexpi (tree exp, rtx target)
2553 tree fndecl = get_callee_fndecl (exp);
2554 tree arg, type;
2555 machine_mode mode;
2556 rtx op0, op1, op2;
2557 location_t loc = EXPR_LOCATION (exp);
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 return NULL_RTX;
2562 arg = CALL_EXPR_ARG (exp, 0);
2563 type = TREE_TYPE (arg);
2564 mode = TYPE_MODE (TREE_TYPE (arg));
2566 /* Try expanding via a sincos optab, fall back to emitting a libcall
2567 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2568 is only generated from sincos or cexp, or when either of them is available.  */
2569 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2571 op1 = gen_reg_rtx (mode);
2572 op2 = gen_reg_rtx (mode);
2574 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2576 /* Compute into op1 and op2. */
2577 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2579 else if (targetm.libc_has_function (function_sincos))
2581 tree call, fn = NULL_TREE;
2582 tree top1, top2;
2583 rtx op1a, op2a;
2585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2588 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2589 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2590 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2591 else
2592 gcc_unreachable ();
2594 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2595 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op1a = copy_addr_to_reg (XEXP (op1, 0));
2597 op2a = copy_addr_to_reg (XEXP (op2, 0));
2598 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2599 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2601 /* Make sure not to fold the sincos call again. */
2602 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2603 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2604 call, 3, arg, top1, top2));
2606 else
2608 tree call, fn = NULL_TREE, narg;
2609 tree ctype = build_complex_type (type);
2611 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2614 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2616 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2617 else
2618 gcc_unreachable ();
2620 /* If we don't have a decl for cexp create one. This is the
2621 friendliest fallback if the user calls __builtin_cexpi
2622 on a target without full C99 function support.  */
2623 if (fn == NULL_TREE)
2625 tree fntype;
2626 const char *name = NULL;
2628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2629 name = "cexpf";
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2631 name = "cexp";
2632 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2633 name = "cexpl";
2635 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2636 fn = build_fn_decl (name, fntype);
2639 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2640 build_real (type, dconst0), arg);
2642 /* Make sure not to fold the cexp call again. */
2643 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2644 return expand_expr (build_call_nary (ctype, call, 1, narg),
2645 target, VOIDmode, EXPAND_NORMAL);
2648 /* Now build the proper return type. */
2649 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2650 make_tree (TREE_TYPE (arg), op2),
2651 make_tree (TREE_TYPE (arg), op1)),
2652 target, VOIDmode, EXPAND_NORMAL);
2655 /* Conveniently construct a function call expression. FNDECL names the
2656 function to be called, N is the number of arguments, and the "..."
2657 parameters are the argument expressions.  Unlike build_call_expr
2658 this doesn't fold the call, hence it will always return a CALL_EXPR. */
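/* A typical use looks like

     build_call_nofold_loc (loc, fndecl, 2, dst, src)

   (arguments illustrative), yielding a two-argument CALL_EXPR to
   FNDECL with no folding applied.  */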
2660 static tree
2661 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2663 va_list ap;
2664 tree fntype = TREE_TYPE (fndecl);
2665 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2667 va_start (ap, n);
2668 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2669 va_end (ap);
2670 SET_EXPR_LOCATION (fn, loc);
2671 return fn;
2674 /* Expand a call to one of the builtin rounding functions gcc defines
2675 as an extension (lfloor and lceil). As these are gcc extensions we
2676 do not need to worry about setting errno to EDOM.
2677 If expanding via the optab fails, lower the expression to (int) floor (x).
2678 EXP is the expression that is a call to the builtin function;
2679 if convenient, the result should be placed in TARGET. */
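/* E.g. __builtin_lfloor (x) goes through lfloor_optab when possible;
   otherwise it is lowered to roughly (long) floor (x), with the final
   conversion performed by expand_fix below.  */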
2681 static rtx
2682 expand_builtin_int_roundingfn (tree exp, rtx target)
2684 convert_optab builtin_optab;
2685 rtx op0, tmp;
2686 rtx_insn *insns;
2687 tree fndecl = get_callee_fndecl (exp);
2688 enum built_in_function fallback_fn;
2689 tree fallback_fndecl;
2690 machine_mode mode;
2691 tree arg;
2693 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2694 gcc_unreachable ();
2696 arg = CALL_EXPR_ARG (exp, 0);
2698 switch (DECL_FUNCTION_CODE (fndecl))
2700 CASE_FLT_FN (BUILT_IN_ICEIL):
2701 CASE_FLT_FN (BUILT_IN_LCEIL):
2702 CASE_FLT_FN (BUILT_IN_LLCEIL):
2703 builtin_optab = lceil_optab;
2704 fallback_fn = BUILT_IN_CEIL;
2705 break;
2707 CASE_FLT_FN (BUILT_IN_IFLOOR):
2708 CASE_FLT_FN (BUILT_IN_LFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2710 builtin_optab = lfloor_optab;
2711 fallback_fn = BUILT_IN_FLOOR;
2712 break;
2714 default:
2715 gcc_unreachable ();
2718 /* Make a suitable register to place result in. */
2719 mode = TYPE_MODE (TREE_TYPE (exp));
2721 target = gen_reg_rtx (mode);
2723 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2724 need to expand the argument again. This way, we will not perform
2725 side-effects more than once.  */
2726 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2728 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2730 start_sequence ();
2732 /* Compute into TARGET. */
2733 if (expand_sfix_optab (target, op0, builtin_optab))
2735 /* Output the entire sequence. */
2736 insns = get_insns ();
2737 end_sequence ();
2738 emit_insn (insns);
2739 return target;
2742 /* If we were unable to expand via the builtin, stop the sequence
2743 (without outputting the insns). */
2744 end_sequence ();
2746 /* Fall back to floating point rounding optab. */
2747 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2749 /* For non-C99 targets we may end up without a fallback fndecl here
2750 if the user called __builtin_lfloor directly. In this case emit
2751 a call to the floor/ceil variants nevertheless. This should result
2752 in the best user experience for targets without full C99 support.  */
2753 if (fallback_fndecl == NULL_TREE)
2755 tree fntype;
2756 const char *name = NULL;
2758 switch (DECL_FUNCTION_CODE (fndecl))
2760 case BUILT_IN_ICEIL:
2761 case BUILT_IN_LCEIL:
2762 case BUILT_IN_LLCEIL:
2763 name = "ceil";
2764 break;
2765 case BUILT_IN_ICEILF:
2766 case BUILT_IN_LCEILF:
2767 case BUILT_IN_LLCEILF:
2768 name = "ceilf";
2769 break;
2770 case BUILT_IN_ICEILL:
2771 case BUILT_IN_LCEILL:
2772 case BUILT_IN_LLCEILL:
2773 name = "ceill";
2774 break;
2775 case BUILT_IN_IFLOOR:
2776 case BUILT_IN_LFLOOR:
2777 case BUILT_IN_LLFLOOR:
2778 name = "floor";
2779 break;
2780 case BUILT_IN_IFLOORF:
2781 case BUILT_IN_LFLOORF:
2782 case BUILT_IN_LLFLOORF:
2783 name = "floorf";
2784 break;
2785 case BUILT_IN_IFLOORL:
2786 case BUILT_IN_LFLOORL:
2787 case BUILT_IN_LLFLOORL:
2788 name = "floorl";
2789 break;
2790 default:
2791 gcc_unreachable ();
2794 fntype = build_function_type_list (TREE_TYPE (arg),
2795 TREE_TYPE (arg), NULL_TREE);
2796 fallback_fndecl = build_fn_decl (name, fntype);
2799 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2801 tmp = expand_normal (exp);
2802 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2804 /* Truncate the result of the floating-point optab to an integer
2805 via expand_fix ().  */
2806 target = gen_reg_rtx (mode);
2807 expand_fix (target, tmp, 0);
2809 return target;
2812 /* Expand a call to one of the builtin math functions doing integer
2813 conversion (lrint).
2814 Return 0 if a normal call should be emitted rather than expanding the
2815 function in-line. EXP is the expression that is a call to the builtin
2816 function; if convenient, the result should be placed in TARGET. */
2818 static rtx
2819 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2821 convert_optab builtin_optab;
2822 rtx op0;
2823 rtx_insn *insns;
2824 tree fndecl = get_callee_fndecl (exp);
2825 tree arg;
2826 machine_mode mode;
2827 enum built_in_function fallback_fn = BUILT_IN_NONE;
2829 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2830 gcc_unreachable ();
2832 arg = CALL_EXPR_ARG (exp, 0);
2834 switch (DECL_FUNCTION_CODE (fndecl))
2836 CASE_FLT_FN (BUILT_IN_IRINT):
2837 fallback_fn = BUILT_IN_LRINT;
2838 gcc_fallthrough ();
2839 CASE_FLT_FN (BUILT_IN_LRINT):
2840 CASE_FLT_FN (BUILT_IN_LLRINT):
2841 builtin_optab = lrint_optab;
2842 break;
2844 CASE_FLT_FN (BUILT_IN_IROUND):
2845 fallback_fn = BUILT_IN_LROUND;
2846 gcc_fallthrough ();
2847 CASE_FLT_FN (BUILT_IN_LROUND):
2848 CASE_FLT_FN (BUILT_IN_LLROUND):
2849 builtin_optab = lround_optab;
2850 break;
2852 default:
2853 gcc_unreachable ();
2856 /* There's no easy way to detect the case where we need to set EDOM.  */
2857 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2858 return NULL_RTX;
2860 /* Make a suitable register to place result in. */
2861 mode = TYPE_MODE (TREE_TYPE (exp));
2863 /* Since the EDOM case can't be detected, expand inline only when errno handling isn't required.  */
2864 if (!flag_errno_math)
2866 rtx result = gen_reg_rtx (mode);
2868 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2869 need to expand the argument again. This way, we will not perform
2870 side-effects more than once.  */
2871 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2873 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2875 start_sequence ();
2877 if (expand_sfix_optab (result, op0, builtin_optab))
2879 /* Output the entire sequence. */
2880 insns = get_insns ();
2881 end_sequence ();
2882 emit_insn (insns);
2883 return result;
2886 /* If we were unable to expand via the builtin, stop the sequence
2887 (without outputting the insns) and emit a call to the library function
2888 with the stabilized argument list. */
2889 end_sequence ();
2892 if (fallback_fn != BUILT_IN_NONE)
2894 /* Fall back to rounding to long int.  Use implicit_p 0: for non-C99
2895 targets, (int) round (x) should never be transformed into
2896 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2897 a call to lround in the hope that the target provides at least some
2898 C99 functions.  This should result in the best user experience on
2899 targets without full C99 support.  */
2900 tree fallback_fndecl = mathfn_built_in_1
2901 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2903 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2904 fallback_fndecl, 1, arg);
2906 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2907 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2908 return convert_to_mode (mode, target, 0);
2911 return expand_call (exp, target, target == const0_rtx);
2914 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2915 a normal call should be emitted rather than expanding the function
2916 in-line. EXP is the expression that is a call to the builtin
2917 function; if convenient, the result should be placed in TARGET. */
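/* E.g. for double, the libcall below normally resolves to libgcc's
   __powidf2, so __builtin_powi (x, n) becomes a call __powidf2 (x, n).  */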
2919 static rtx
2920 expand_builtin_powi (tree exp, rtx target)
2922 tree arg0, arg1;
2923 rtx op0, op1;
2924 machine_mode mode;
2925 machine_mode mode2;
2927 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2928 return NULL_RTX;
2930 arg0 = CALL_EXPR_ARG (exp, 0);
2931 arg1 = CALL_EXPR_ARG (exp, 1);
2932 mode = TYPE_MODE (TREE_TYPE (exp));
2934 /* Emit a libcall to libgcc. */
2936 /* Mode of the 2nd argument must match that of an int. */
2937 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2939 if (target == NULL_RTX)
2940 target = gen_reg_rtx (mode);
2942 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2943 if (GET_MODE (op0) != mode)
2944 op0 = convert_to_mode (mode, op0, 0);
2945 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2946 if (GET_MODE (op1) != mode2)
2947 op1 = convert_to_mode (mode2, op1, 0);
2949 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2950 target, LCT_CONST, mode,
2951 op0, mode, op1, mode2);
2953 return target;
2956 /* Expand expression EXP which is a call to the strlen builtin. Return
2957 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2958 try to get the result in TARGET, if convenient. */
2960 static rtx
2961 expand_builtin_strlen (tree exp, rtx target,
2962 machine_mode target_mode)
2964 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2965 return NULL_RTX;
2967 struct expand_operand ops[4];
2968 rtx pat;
2969 tree len;
2970 tree src = CALL_EXPR_ARG (exp, 0);
2971 rtx src_reg;
2972 rtx_insn *before_strlen;
2973 machine_mode insn_mode;
2974 enum insn_code icode = CODE_FOR_nothing;
2975 unsigned int align;
2977 /* If the length can be computed at compile-time, return it. */
2978 len = c_strlen (src, 0);
2979 if (len)
2980 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2982 /* If the length can be computed at compile-time and is a constant
2983 integer, but there are side-effects in src, evaluate
2984 src for side-effects, then return len.
2985 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2986 can be optimized into: i++; x = 3; */
2987 len = c_strlen (src, 1);
2988 if (len && TREE_CODE (len) == INTEGER_CST)
2990 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2991 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2994 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2996 /* If SRC is not a pointer type, don't do this operation inline. */
2997 if (align == 0)
2998 return NULL_RTX;
3000 /* Bail out if we can't compute strlen in the right mode. */
3001 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3003 icode = optab_handler (strlen_optab, insn_mode);
3004 if (icode != CODE_FOR_nothing)
3005 break;
3007 if (insn_mode == VOIDmode)
3008 return NULL_RTX;
3010 /* Make a place to hold the source address. We will not expand
3011 the actual source until we are sure that the expansion will
3012 not fail -- there are trees that cannot be expanded twice. */
3013 src_reg = gen_reg_rtx (Pmode);
3015 /* Mark the beginning of the strlen sequence so we can emit the
3016 source operand later. */
3017 before_strlen = get_last_insn ();
3019 create_output_operand (&ops[0], target, insn_mode);
3020 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3021 create_integer_operand (&ops[2], 0);
3022 create_integer_operand (&ops[3], align);
3023 if (!maybe_expand_insn (icode, 4, ops))
3024 return NULL_RTX;
3026 /* Check to see if the argument was declared attribute nonstring
3027 and if so, issue a warning since at this point it's not known
3028 to be nul-terminated. */
3029 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3031 /* Now that we are assured of success, expand the source. */
3032 start_sequence ();
3033 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3034 if (pat != src_reg)
3036 #ifdef POINTERS_EXTEND_UNSIGNED
3037 if (GET_MODE (pat) != Pmode)
3038 pat = convert_to_mode (Pmode, pat,
3039 POINTERS_EXTEND_UNSIGNED);
3040 #endif
3041 emit_move_insn (src_reg, pat);
3043 pat = get_insns ();
3044 end_sequence ();
3046 if (before_strlen)
3047 emit_insn_after (pat, before_strlen);
3048 else
3049 emit_insn_before (pat, get_insns ());
3051 /* Return the value in the proper mode for this function. */
3052 if (GET_MODE (ops[0].value) == target_mode)
3053 target = ops[0].value;
3054 else if (target != 0)
3055 convert_move (target, ops[0].value, 0);
3056 else
3057 target = convert_to_mode (target_mode, ops[0].value, 0);
3059 return target;
3062 /* Expand call EXP to the strnlen built-in, returning the result
3063 in TARGET if convenient, or NULL_RTX on failure.  */
3065 static rtx
3066 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3068 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3069 return NULL_RTX;
3071 tree src = CALL_EXPR_ARG (exp, 0);
3072 tree bound = CALL_EXPR_ARG (exp, 1);
3074 if (!bound)
3075 return NULL_RTX;
3077 location_t loc = UNKNOWN_LOCATION;
3078 if (EXPR_HAS_LOCATION (exp))
3079 loc = EXPR_LOCATION (exp);
3081 tree maxobjsize = max_object_size ();
3082 tree func = get_callee_fndecl (exp);
3084 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3085 so these conversions aren't necessary. */
3086 c_strlen_data lendata = { };
3087 tree len = c_strlen (src, 0, &lendata, 1);
3088 if (len)
3089 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3091 if (TREE_CODE (bound) == INTEGER_CST)
3093 if (!TREE_NO_WARNING (exp)
3094 && tree_int_cst_lt (maxobjsize, bound)
3095 && warning_at (loc, OPT_Wstringop_overflow_,
3096 "%K%qD specified bound %E "
3097 "exceeds maximum object size %E",
3098 exp, func, bound, maxobjsize))
3099 TREE_NO_WARNING (exp) = true;
3101 bool exact = true;
3102 if (!len || TREE_CODE (len) != INTEGER_CST)
3104 /* Clear EXACT if LEN may be less than SRC suggests,
3105 such as in
3106 strnlen (&a[i], sizeof a)
3107 where the value of i is unknown. Unless i's value is
3108 zero, the call is unsafe because the bound is greater. */
3109 lendata.decl = unterminated_array (src, &len, &exact);
3110 if (!lendata.decl)
3111 return NULL_RTX;
3114 if (lendata.decl
3115 && !TREE_NO_WARNING (exp)
3116 && ((tree_int_cst_lt (len, bound))
3117 || !exact))
3119 location_t warnloc
3120 = expansion_point_location_if_in_system_header (loc);
3122 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3123 exact
3124 ? G_("%K%qD specified bound %E exceeds the size %E "
3125 "of unterminated array")
3126 : G_("%K%qD specified bound %E may exceed the size "
3127 "of at most %E of unterminated array"),
3128 exp, func, bound, len))
3130 inform (DECL_SOURCE_LOCATION (lendata.decl),
3131 "referenced argument declared here");
3132 TREE_NO_WARNING (exp) = true;
3133 return NULL_RTX;
3137 if (!len)
3138 return NULL_RTX;
3140 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3141 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3144 if (TREE_CODE (bound) != SSA_NAME)
3145 return NULL_RTX;
3147 wide_int min, max;
3148 enum value_range_kind rng = get_range_info (bound, &min, &max);
3149 if (rng != VR_RANGE)
3150 return NULL_RTX;
3152 if (!TREE_NO_WARNING (exp)
3153 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3154 && warning_at (loc, OPT_Wstringop_overflow_,
3155 "%K%qD specified bound [%wu, %wu] "
3156 "exceeds maximum object size %E",
3157 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3158 TREE_NO_WARNING (exp) = true;
3160 bool exact = true;
3161 if (!len || TREE_CODE (len) != INTEGER_CST)
3163 lendata.decl = unterminated_array (src, &len, &exact);
3164 if (!lendata.decl)
3165 return NULL_RTX;
3168 if (lendata.decl
3169 && !TREE_NO_WARNING (exp)
3170 && (wi::ltu_p (wi::to_wide (len), min)
3171 || !exact))
3173 location_t warnloc
3174 = expansion_point_location_if_in_system_header (loc);
3176 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3177 exact
3178 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3179 "the size %E of unterminated array")
3180 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3181 "the size of at most %E of unterminated array"),
3182 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3184 inform (DECL_SOURCE_LOCATION (lendata.decl),
3185 "referenced argument declared here");
3186 TREE_NO_WARNING (exp) = true;
3190 if (lendata.decl)
3191 return NULL_RTX;
3193 if (wi::gtu_p (min, wi::to_wide (len)))
3194 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3196 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3197 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3200 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3201 bytes from constant string DATA + OFFSET and return it as target
3202 constant. */
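/* E.g. (illustrative) with DATA pointing at "abc", OFFSET 1 and QImode,
   the result is a CONST_INT holding 'b'; c_readstr handles wider modes
   and target byte order.  */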
3204 static rtx
3205 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3206 scalar_int_mode mode)
3208 const char *str = (const char *) data;
3210 gcc_assert (offset >= 0
3211 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3212 <= strlen (str) + 1));
3214 return c_readstr (str + offset, mode);
3217 /* LEN specifies the length of the block for a memcpy/memset operation.
3218 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3219 In some cases we can make a very likely guess at the maximum size,
3220 which we then store in PROBABLE_MAX_SIZE.  */
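/* For example (illustrative), for memcpy (a, b, n) where VRP has
   bounded the unsigned value n to [0, 100], this sets *MIN_SIZE to 0
   and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 100.  */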
3222 static void
3223 determine_block_size (tree len, rtx len_rtx,
3224 unsigned HOST_WIDE_INT *min_size,
3225 unsigned HOST_WIDE_INT *max_size,
3226 unsigned HOST_WIDE_INT *probable_max_size)
3228 if (CONST_INT_P (len_rtx))
3230 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3231 return;
3233 else
3235 wide_int min, max;
3236 enum value_range_kind range_type = VR_UNDEFINED;
3238 /* Determine bounds from the type. */
3239 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3240 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3241 else
3242 *min_size = 0;
3243 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3244 *probable_max_size = *max_size
3245 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3246 else
3247 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3249 if (TREE_CODE (len) == SSA_NAME)
3250 range_type = get_range_info (len, &min, &max);
3251 if (range_type == VR_RANGE)
3253 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3254 *min_size = min.to_uhwi ();
3255 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3256 *probable_max_size = *max_size = max.to_uhwi ();
3258 else if (range_type == VR_ANTI_RANGE)
3260 /* An anti range 0...N lets us determine the minimal size to be N+1.  */
3261 if (min == 0)
3263 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3264 *min_size = max.to_uhwi () + 1;
3266 /* Code like
3268 int n;
3269 if (n < 100)
3270 memcpy (a, b, n)
3272 produces an anti range allowing negative values of N.  We can still
3273 use the information to guess that N is not negative.
3274 */
3275 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3276 *probable_max_size = min.to_uhwi () - 1;
3279 gcc_checking_assert (*max_size <=
3280 (unsigned HOST_WIDE_INT)
3281 GET_MODE_MASK (GET_MODE (len_rtx)));
3284 /* Try to verify that the sizes and lengths of the arguments to a string
3285 manipulation function given by EXP are within valid bounds and that
3286 the operation does not lead to buffer overflow or read past the end.
3287 Arguments other than EXP may be null. When non-null, the arguments
3288 have the following meaning:
3289 DST is the destination of a copy call or NULL otherwise.
3290 SRC is the source of a copy call or NULL otherwise.
3291 DSTWRITE is the number of bytes written into the destination obtained
3292 from the user-supplied size argument to the function (such as in
3293 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3294 MAXREAD is the user-supplied bound on the length of the source sequence
3295 (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3296 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3297 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3298 expression EXP is a string function call (as opposed to a memory call
3299 like memcpy). As an exception, SRCSTR can also be an integer denoting
3300 the precomputed size of the source string or object (for functions like
3301 memcpy).
3302 DSTSIZE is the size of the destination object specified by the last
3303 argument to the _chk builtins, typically resulting from the expansion
3304 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3305 DSTSIZE).
3307 When DSTWRITE is null, the source length is checked to verify that
3308 it doesn't exceed SIZE_MAX.
3310 If the call is successfully verified as safe return true, otherwise
3311 return false. */
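/* As an illustrative example, given

     char d[3];
     strcpy (d, "abcd");

   DSTSIZE is 3 while the copy writes 5 bytes (including the
   terminating nul), so the check fails with a -Wstringop-overflow
   warning about writing 5 bytes into a region of size 3.  */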
3313 static bool
3314 check_access (tree exp, tree, tree, tree dstwrite,
3315 tree maxread, tree srcstr, tree dstsize)
3317 int opt = OPT_Wstringop_overflow_;
3319 /* The size of the largest object is half the address space, or
3320 PTRDIFF_MAX. (This is way too permissive.) */
3321 tree maxobjsize = max_object_size ();
3323 /* Either the length of the source string for string functions or
3324 the size of the source object for raw memory functions. */
3325 tree slen = NULL_TREE;
3327 tree range[2] = { NULL_TREE, NULL_TREE };
3329 /* Set to true when the exact number of bytes written by a string
3330 function like strcpy is not known and the only thing that is
3331 known is that it must be at least one (for the terminating nul). */
3332 bool at_least_one = false;
3333 if (srcstr)
3335 /* SRCSTR is normally a pointer to a string but as a special case
3336 it can be an integer denoting the length of a string. */
3337 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3339 /* Try to determine the range of lengths the source string
3340 refers to. If it can be determined and is less than
3341 the upper bound given by MAXREAD add one to it for
3342 the terminating nul. Otherwise, set it to one for
3343 the same reason, or to MAXREAD as appropriate. */
3344 c_strlen_data lendata = { };
3345 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3346 range[0] = lendata.minlen;
3347 range[1] = lendata.maxbound;
3348 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3350 if (maxread && tree_int_cst_le (maxread, range[0]))
3351 range[0] = range[1] = maxread;
3352 else
3353 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3354 range[0], size_one_node);
3356 if (maxread && tree_int_cst_le (maxread, range[1]))
3357 range[1] = maxread;
3358 else if (!integer_all_onesp (range[1]))
3359 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3360 range[1], size_one_node);
3362 slen = range[0];
3364 else
3366 at_least_one = true;
3367 slen = size_one_node;
3370 else
3371 slen = srcstr;
3374 if (!dstwrite && !maxread)
3376 /* When the only available piece of data is the object size
3377 there is nothing to do. */
3378 if (!slen)
3379 return true;
3381 /* Otherwise, when the length of the source sequence is known
3382 (as with strlen), set DSTWRITE to it. */
3383 if (!range[0])
3384 dstwrite = slen;
3387 if (!dstsize)
3388 dstsize = maxobjsize;
3390 if (dstwrite)
3391 get_size_range (dstwrite, range);
3393 tree func = get_callee_fndecl (exp);
3395 /* First check the number of bytes to be written against the maximum
3396 object size. */
3397 if (range[0]
3398 && TREE_CODE (range[0]) == INTEGER_CST
3399 && tree_int_cst_lt (maxobjsize, range[0]))
3401 if (TREE_NO_WARNING (exp))
3402 return false;
3404 location_t loc = tree_nonartificial_location (exp);
3405 loc = expansion_point_location_if_in_system_header (loc);
3407 bool warned;
3408 if (range[0] == range[1])
3409 warned = warning_at (loc, opt,
3410 "%K%qD specified size %E "
3411 "exceeds maximum object size %E",
3412 exp, func, range[0], maxobjsize);
3413 else
3414 warned = warning_at (loc, opt,
3415 "%K%qD specified size between %E and %E "
3416 "exceeds maximum object size %E",
3417 exp, func,
3418 range[0], range[1], maxobjsize);
3419 if (warned)
3420 TREE_NO_WARNING (exp) = true;
3422 return false;
3425 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3426 constant, and in range of unsigned HOST_WIDE_INT. */
3427 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3429 /* Next check the number of bytes to be written against the destination
3430 object size. */
3431 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3433 if (range[0]
3434 && TREE_CODE (range[0]) == INTEGER_CST
3435 && ((tree_fits_uhwi_p (dstsize)
3436 && tree_int_cst_lt (dstsize, range[0]))
3437 || (dstwrite
3438 && tree_fits_uhwi_p (dstwrite)
3439 && tree_int_cst_lt (dstwrite, range[0]))))
3441 if (TREE_NO_WARNING (exp))
3442 return false;
3444 location_t loc = tree_nonartificial_location (exp);
3445 loc = expansion_point_location_if_in_system_header (loc);
3447 if (dstwrite == slen && at_least_one)
3449 /* This is a call to strcpy with a destination of 0 size
3450 and a source of unknown length. The call will write
3451 at least one byte past the end of the destination. */
3452 warning_at (loc, opt,
3453 "%K%qD writing %E or more bytes into a region "
3454 "of size %E overflows the destination",
3455 exp, func, range[0], dstsize);
3457 else if (tree_int_cst_equal (range[0], range[1]))
3458 warning_n (loc, opt, tree_to_uhwi (range[0]),
3459 "%K%qD writing %E byte into a region "
3460 "of size %E overflows the destination",
3461 "%K%qD writing %E bytes into a region "
3462 "of size %E overflows the destination",
3463 exp, func, range[0], dstsize);
3464 else if (tree_int_cst_sign_bit (range[1]))
3466 /* Avoid printing the upper bound if it's invalid. */
3467 warning_at (loc, opt,
3468 "%K%qD writing %E or more bytes into a region "
3469 "of size %E overflows the destination",
3470 exp, func, range[0], dstsize);
3472 else
3473 warning_at (loc, opt,
3474 "%K%qD writing between %E and %E bytes into "
3475 "a region of size %E overflows the destination",
3476 exp, func, range[0], range[1],
3477 dstsize);
3479 /* Return error when an overflow has been detected. */
3480 return false;
3484 /* Check the maximum length of the source sequence against the size
3485 of the destination object if known, or against the maximum size
3486 of an object. */
3487 if (maxread)
3489 get_size_range (maxread, range);
3491 /* Use the lower end for MAXREAD from now on. */
3492 if (range[0])
3493 maxread = range[0];
3495 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3497 location_t loc = tree_nonartificial_location (exp);
3498 loc = expansion_point_location_if_in_system_header (loc);
3500 if (tree_int_cst_lt (maxobjsize, range[0]))
3502 if (TREE_NO_WARNING (exp))
3503 return false;
3505 /* Warn about crazy big sizes first since that's more
3506 likely to be meaningful than saying that the bound
3507 is greater than the object size if both are big. */
3508 if (range[0] == range[1])
3509 warning_at (loc, opt,
3510 "%K%qD specified bound %E "
3511 "exceeds maximum object size %E",
3512 exp, func,
3513 range[0], maxobjsize);
3514 else
3515 warning_at (loc, opt,
3516 "%K%qD specified bound between %E and %E "
3517 "exceeds maximum object size %E",
3518 exp, func,
3519 range[0], range[1], maxobjsize);
3521 return false;
3524 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3526 if (TREE_NO_WARNING (exp))
3527 return false;
3529 if (tree_int_cst_equal (range[0], range[1]))
3530 warning_at (loc, opt,
3531 "%K%qD specified bound %E "
3532 "exceeds destination size %E",
3533 exp, func,
3534 range[0], dstsize);
3535 else
3536 warning_at (loc, opt,
3537 "%K%qD specified bound between %E and %E "
3538 "exceeds destination size %E",
3539 exp, func,
3540 range[0], range[1], dstsize);
3541 return false;
3546 /* Check for reading past the end of SRC. */
3547 if (slen
3548 && slen == srcstr
3549 && dstwrite && range[0]
3550 && tree_int_cst_lt (slen, range[0]))
3552 if (TREE_NO_WARNING (exp))
3553 return false;
3555 location_t loc = tree_nonartificial_location (exp);
3557 if (tree_int_cst_equal (range[0], range[1]))
3558 warning_n (loc, opt, tree_to_uhwi (range[0]),
3559 "%K%qD reading %E byte from a region of size %E",
3560 "%K%qD reading %E bytes from a region of size %E",
3561 exp, func, range[0], slen);
3562 else if (tree_int_cst_sign_bit (range[1]))
3564 /* Avoid printing the upper bound if it's invalid. */
3565 warning_at (loc, opt,
3566 "%K%qD reading %E or more bytes from a region "
3567 "of size %E",
3568 exp, func, range[0], slen);
3570 else
3571 warning_at (loc, opt,
3572 "%K%qD reading between %E and %E bytes from a region "
3573 "of size %E",
3574 exp, func, range[0], range[1], slen);
3575 return false;
3578 return true;
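/* An illustrative example (not from the GCC sources): for

     char d[4];
     strcpy (d, "abcdef");

   get_range_strlen gives a source length range of [6, 6], adding the
   terminating nul makes the write range [7, 7], and with DSTSIZE equal
   to 4 the destination check above reports "'strcpy' writing 7 bytes
   into a region of size 4 overflows the destination" and returns
   false.  */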
3581 /* Helper to compute the size of the object referenced by the DEST
3582 expression which must have pointer type, using Object Size type
3583 OSTYPE (only the least significant 2 bits are used). Return
3584 an estimate of the size of the object if successful or NULL when
3585 the size cannot be determined. When the referenced object involves
3586 a non-constant offset in some range the returned value represents
3587 the largest size given the smallest non-negative offset in the
3588 range. The function is intended for diagnostics and should not
3589 be used to influence code generation or optimization. */
3591 tree
3592 compute_objsize (tree dest, int ostype)
3594 unsigned HOST_WIDE_INT size;
3596 /* Only the two least significant bits are meaningful. */
3597 ostype &= 3;
3599 if (compute_builtin_object_size (dest, ostype, &size))
3600 return build_int_cst (sizetype, size);
3602 if (TREE_CODE (dest) == SSA_NAME)
3604 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3605 if (!is_gimple_assign (stmt))
3606 return NULL_TREE;
3608 dest = gimple_assign_rhs1 (stmt);
3610 tree_code code = gimple_assign_rhs_code (stmt);
3611 if (code == POINTER_PLUS_EXPR)
3613 /* compute_builtin_object_size fails for addresses with
3614 non-constant offsets. Try to determine the range of
3615 such an offset here and use it to adjust the constant
3616 size. */
3617 tree off = gimple_assign_rhs2 (stmt);
3618 if (TREE_CODE (off) == INTEGER_CST)
3620 if (tree size = compute_objsize (dest, ostype))
3622 wide_int wioff = wi::to_wide (off);
3623 wide_int wisiz = wi::to_wide (size);
3625 /* Ignore negative offsets for now. For others,
3626 use the lower bound as the most optimistic
3627 estimate of the (remaining) size. */
3628 if (wi::sign_mask (wioff))
3630 else if (wi::ltu_p (wioff, wisiz))
3631 return wide_int_to_tree (TREE_TYPE (size),
3632 wi::sub (wisiz, wioff));
3633 else
3634 return size_zero_node;
3637 else if (TREE_CODE (off) == SSA_NAME
3638 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3640 wide_int min, max;
3641 enum value_range_kind rng = get_range_info (off, &min, &max);
3643 if (rng == VR_RANGE)
3645 if (tree size = compute_objsize (dest, ostype))
3647 wide_int wisiz = wi::to_wide (size);
3649 /* Ignore negative offsets for now. For others,
3650 use the lower bound as the most optimistic
3651 estimate of the (remaining) size. */
3652 if (wi::sign_mask (min))
3654 else if (wi::ltu_p (min, wisiz))
3655 return wide_int_to_tree (TREE_TYPE (size),
3656 wi::sub (wisiz, min));
3657 else
3658 return size_zero_node;
3663 else if (code != ADDR_EXPR)
3664 return NULL_TREE;
3667 /* Unless computing the largest size (for memcpy and other raw memory
3668 functions), try to determine the size of the object from its type. */
3669 if (!ostype)
3670 return NULL_TREE;
3672 if (TREE_CODE (dest) != ADDR_EXPR)
3673 return NULL_TREE;
3675 tree type = TREE_TYPE (dest);
3676 if (TREE_CODE (type) == POINTER_TYPE)
3677 type = TREE_TYPE (type);
3679 type = TYPE_MAIN_VARIANT (type);
3681 if (TREE_CODE (type) == ARRAY_TYPE
3682 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3684 /* Return the constant size unless it's zero (that's a zero-length
3685 array likely at the end of a struct). */
3686 tree size = TYPE_SIZE_UNIT (type);
3687 if (size && TREE_CODE (size) == INTEGER_CST
3688 && !integer_zerop (size))
3689 return size;
3692 return NULL_TREE;
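/* A worked example (illustrative only): for

     char a[8];
     char *p = a + i;   // i known to be in the range [2, 5]

   compute_builtin_object_size fails because the offset is not
   constant, but the POINTER_PLUS_EXPR handling above takes the range
   minimum 2, subtracts it from the underlying size 8, and returns 6,
   the most optimistic estimate of the remaining space.  */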
3695 /* Helper to determine and check the sizes of the source and the destination
3696 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3697 call expression, DEST is the destination argument, SRC is the source
3698 argument or null, and LEN is the number of bytes. Use Object Size type-0
3699 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3700 (no overflow or invalid sizes), false otherwise. */
3702 static bool
3703 check_memop_access (tree exp, tree dest, tree src, tree size)
3705 /* For functions like memset and memcpy that operate on raw memory
3706 try to determine the size of the largest source and destination
3707 object using type-0 Object Size regardless of the object size
3708 type specified by the option. */
3709 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3710 tree dstsize = compute_objsize (dest, 0);
3712 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3713 srcsize, dstsize);
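/* A hypothetical example of the type-0 choice made here: given

     struct S { char a[4]; char b[4]; } s;
     memset (s.a, 0, 8);

   type-0 Object Size for s.a extends to the end of the enclosing
   object, giving 8, so the raw-memory write is not diagnosed, whereas
   type-1 (used for the string built-ins) would yield only the 4 bytes
   of the member.  */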
3716 /* Validate memchr arguments without performing any expansion.
3717 Return NULL_RTX. */
3719 static rtx
3720 expand_builtin_memchr (tree exp, rtx)
3722 if (!validate_arglist (exp,
3723 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724 return NULL_RTX;
3726 tree arg1 = CALL_EXPR_ARG (exp, 0);
3727 tree len = CALL_EXPR_ARG (exp, 2);
3729 /* Diagnose calls where the specified length exceeds the size
3730 of the object. */
3731 if (warn_stringop_overflow)
3733 tree size = compute_objsize (arg1, 0);
3734 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3735 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3738 return NULL_RTX;
3741 /* Expand a call EXP to the memcpy builtin.
3742 Return NULL_RTX if we failed; the caller should emit a normal call,
3743 otherwise try to get the result in TARGET, if convenient (and in
3744 mode MODE if that's convenient). */
3746 static rtx
3747 expand_builtin_memcpy (tree exp, rtx target)
3749 if (!validate_arglist (exp,
3750 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3751 return NULL_RTX;
3753 tree dest = CALL_EXPR_ARG (exp, 0);
3754 tree src = CALL_EXPR_ARG (exp, 1);
3755 tree len = CALL_EXPR_ARG (exp, 2);
3757 check_memop_access (exp, dest, src, len);
3759 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3760 /*retmode=*/ RETURN_BEGIN);
3763 /* Check a call EXP to the memmove built-in for validity.
3764 Return NULL_RTX on both success and failure. */
3766 static rtx
3767 expand_builtin_memmove (tree exp, rtx)
3769 if (!validate_arglist (exp,
3770 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3771 return NULL_RTX;
3773 tree dest = CALL_EXPR_ARG (exp, 0);
3774 tree src = CALL_EXPR_ARG (exp, 1);
3775 tree len = CALL_EXPR_ARG (exp, 2);
3777 check_memop_access (exp, dest, src, len);
3779 return NULL_RTX;
3782 /* Expand a call EXP to the mempcpy builtin.
3783 Return NULL_RTX if we failed; the caller should emit a normal call,
3784 otherwise try to get the result in TARGET, if convenient (and in
3785 mode MODE if that's convenient). */
3787 static rtx
3788 expand_builtin_mempcpy (tree exp, rtx target)
3790 if (!validate_arglist (exp,
3791 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792 return NULL_RTX;
3794 tree dest = CALL_EXPR_ARG (exp, 0);
3795 tree src = CALL_EXPR_ARG (exp, 1);
3796 tree len = CALL_EXPR_ARG (exp, 2);
3798 /* Policy does not generally allow using compute_objsize (which
3799 is used internally by check_memop_access) to change code generation
3800 or drive optimization decisions.
3802 In this instance it is safe because the code we generate has
3803 the same semantics regardless of the return value of
3804 check_memop_access. Exactly the same amount of data is copied
3805 and the return value is exactly the same in both cases.
3807 Furthermore, check_memop_access always uses mode 0 for the call to
3808 compute_objsize, so the imprecise nature of compute_objsize is
3809 avoided. */
3811 /* Avoid expanding mempcpy into memcpy when the call is determined
3812 to overflow the buffer. This also prevents the same overflow
3813 from being diagnosed again when expanding memcpy. */
3814 if (!check_memop_access (exp, dest, src, len))
3815 return NULL_RTX;
3817 return expand_builtin_mempcpy_args (dest, src, len,
3818 target, exp, /*retmode=*/ RETURN_END);
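/* A sketch of the RETURN_END semantics requested here (example only):

     char buf[8];
     char *p = mempcpy (buf, "abc", 3);   // p == buf + 3

   mempcpy copies exactly like memcpy but returns DEST + LEN, which is
   why the expansion can share expand_builtin_memory_copy_args with
   memcpy.  */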
3821 /* Helper function to do the actual work for expand of memory copy family
3822 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3823 of memory from SRC to DEST and assign to TARGET if convenient. Return
3824 value is based on RETMODE argument. */
3826 static rtx
3827 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3828 rtx target, tree exp, memop_ret retmode)
3830 const char *src_str;
3831 unsigned int src_align = get_pointer_alignment (src);
3832 unsigned int dest_align = get_pointer_alignment (dest);
3833 rtx dest_mem, src_mem, dest_addr, len_rtx;
3834 HOST_WIDE_INT expected_size = -1;
3835 unsigned int expected_align = 0;
3836 unsigned HOST_WIDE_INT min_size;
3837 unsigned HOST_WIDE_INT max_size;
3838 unsigned HOST_WIDE_INT probable_max_size;
3840 /* If DEST is not a pointer type, call the normal function. */
3841 if (dest_align == 0)
3842 return NULL_RTX;
3844 /* Likewise, if SRC is not a pointer type, don't do this
3845 operation in-line. */
3846 if (src_align == 0)
3847 return NULL_RTX;
3849 if (currently_expanding_gimple_stmt)
3850 stringop_block_profile (currently_expanding_gimple_stmt,
3851 &expected_align, &expected_size);
3853 if (expected_align < dest_align)
3854 expected_align = dest_align;
3855 dest_mem = get_memory_rtx (dest, len);
3856 set_mem_align (dest_mem, dest_align);
3857 len_rtx = expand_normal (len);
3858 determine_block_size (len, len_rtx, &min_size, &max_size,
3859 &probable_max_size);
3860 src_str = c_getstr (src);
3862 /* If SRC is a string constant and block move would be done
3863 by pieces, we can avoid loading the string from memory
3864 and only store the computed constants. */
3865 if (src_str
3866 && CONST_INT_P (len_rtx)
3867 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3868 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3869 CONST_CAST (char *, src_str),
3870 dest_align, false))
3872 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3873 builtin_memcpy_read_str,
3874 CONST_CAST (char *, src_str),
3875 dest_align, false, retmode);
3876 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3877 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3878 return dest_mem;
3881 src_mem = get_memory_rtx (src, len);
3882 set_mem_align (src_mem, src_align);
3884 /* Copy word part most expediently. */
3885 enum block_op_methods method = BLOCK_OP_NORMAL;
3886 if (CALL_EXPR_TAILCALL (exp)
3887 && (retmode == RETURN_BEGIN || target == const0_rtx))
3888 method = BLOCK_OP_TAILCALL;
3889 if (retmode == RETURN_END && target != const0_rtx)
3890 method = BLOCK_OP_NO_LIBCALL_RET;
3891 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3892 expected_align, expected_size,
3893 min_size, max_size, probable_max_size);
3894 if (dest_addr == pc_rtx)
3895 return NULL_RTX;
3897 if (dest_addr == 0)
3899 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3900 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3903 if (retmode != RETURN_BEGIN && target != const0_rtx)
3905 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3906 /* For stpcpy, return a pointer to the last byte, i.e. the nul. */
3907 if (retmode == RETURN_END_MINUS_ONE)
3908 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3911 return dest_addr;
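/* To make the final RETMODE adjustment concrete (illustration): for
   stpcpy (d, "abc") the block copy moves 4 bytes including the nul,
   RETURN_END would yield d + 4, and the RETURN_END_MINUS_ONE
   subtraction above yields d + 3, the address of the copied nul
   terminator.  */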
3914 static rtx
3915 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3916 rtx target, tree orig_exp, memop_ret retmode)
3918 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3919 retmode);
3922 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3923 we failed; the caller should emit a normal call, otherwise try to
3924 get the result in TARGET, if convenient.
3925 Return value is based on RETMODE argument. */
3927 static rtx
3928 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3930 struct expand_operand ops[3];
3931 rtx dest_mem;
3932 rtx src_mem;
3934 if (!targetm.have_movstr ())
3935 return NULL_RTX;
3937 dest_mem = get_memory_rtx (dest, NULL);
3938 src_mem = get_memory_rtx (src, NULL);
3939 if (retmode == RETURN_BEGIN)
3941 target = force_reg (Pmode, XEXP (dest_mem, 0));
3942 dest_mem = replace_equiv_address (dest_mem, target);
3945 create_output_operand (&ops[0],
3946 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3947 create_fixed_operand (&ops[1], dest_mem);
3948 create_fixed_operand (&ops[2], src_mem);
3949 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3950 return NULL_RTX;
3952 if (retmode != RETURN_BEGIN && target != const0_rtx)
3954 target = ops[0].value;
3955 /* movstr is supposed to set end to the address of the NUL
3956 terminator. If the caller requested a mempcpy-like return value,
3957 adjust it. */
3958 if (retmode == RETURN_END)
3960 rtx tem = plus_constant (GET_MODE (target),
3961 gen_lowpart (GET_MODE (target), target), 1);
3962 emit_move_insn (target, force_operand (tem, NULL_RTX));
3965 return target;
3968 /* Do some very basic size validation of a call to the strcat builtin
3969 given by EXP. Return NULL_RTX to have the built-in expand to a call
3970 to the library function. */
3972 static rtx
3973 expand_builtin_strcat (tree exp, rtx)
3975 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3976 || !warn_stringop_overflow)
3977 return NULL_RTX;
3979 tree dest = CALL_EXPR_ARG (exp, 0);
3980 tree src = CALL_EXPR_ARG (exp, 1);
3982 /* There is no way here to determine the length of the string in
3983 the destination to which the SRC string is being appended, so
3984 just diagnose cases when the source string is longer than
3985 the destination object. */
3987 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3989 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3990 destsize);
3992 return NULL_RTX;
3995 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3996 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3997 try to get the result in TARGET, if convenient (and in mode MODE if that's
3998 convenient). */
4000 static rtx
4001 expand_builtin_strcpy (tree exp, rtx target)
4003 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4004 return NULL_RTX;
4006 tree dest = CALL_EXPR_ARG (exp, 0);
4007 tree src = CALL_EXPR_ARG (exp, 1);
4009 if (warn_stringop_overflow)
4011 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4012 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4013 src, destsize);
4016 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4018 /* Check to see if the argument was declared attribute nonstring
4019 and if so, issue a warning since at this point it's not known
4020 to be nul-terminated. */
4021 tree fndecl = get_callee_fndecl (exp);
4022 maybe_warn_nonstring_arg (fndecl, exp);
4023 return ret;
4026 return NULL_RTX;
4029 /* Helper function to do the actual work for expand_builtin_strcpy. The
4030 arguments to the builtin_strcpy call DEST and SRC are broken out
4031 so that this can also be called without constructing an actual CALL_EXPR.
4032 The other arguments and return value are the same as for
4033 expand_builtin_strcpy. */
4035 static rtx
4036 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4038 /* Detect strcpy calls with unterminated arrays. */
4039 if (tree nonstr = unterminated_array (src))
4041 /* NONSTR refers to the non-nul terminated constant array. */
4042 if (!TREE_NO_WARNING (exp))
4043 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4044 return NULL_RTX;
4047 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4050 /* Expand a call EXP to the stpcpy builtin.
4051 Return NULL_RTX if we failed; the caller should emit a normal call,
4052 otherwise try to get the result in TARGET, if convenient (and in
4053 mode MODE if that's convenient). */
4055 static rtx
4056 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4058 tree dst, src;
4059 location_t loc = EXPR_LOCATION (exp);
4061 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4062 return NULL_RTX;
4064 dst = CALL_EXPR_ARG (exp, 0);
4065 src = CALL_EXPR_ARG (exp, 1);
4067 if (warn_stringop_overflow)
4069 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4070 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4071 src, destsize);
4074 /* If return value is ignored, transform stpcpy into strcpy. */
4075 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4077 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4078 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4079 return expand_expr (result, target, mode, EXPAND_NORMAL);
4081 else
4083 tree len, lenp1;
4084 rtx ret;
4086 /* Ensure we get an actual string whose length can be evaluated at
4087 compile-time, not an expression containing a string. This is
4088 because the latter will potentially produce pessimized code
4089 when used to produce the return value. */
4090 c_strlen_data lendata = { };
4091 if (!c_getstr (src, NULL)
4092 || !(len = c_strlen (src, 0, &lendata, 1)))
4093 return expand_movstr (dst, src, target,
4094 /*retmode=*/ RETURN_END_MINUS_ONE);
4096 if (lendata.decl && !TREE_NO_WARNING (exp))
4097 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4099 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4100 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4101 target, exp,
4102 /*retmode=*/ RETURN_END_MINUS_ONE);
4104 if (ret)
4105 return ret;
4107 if (TREE_CODE (len) == INTEGER_CST)
4109 rtx len_rtx = expand_normal (len);
4111 if (CONST_INT_P (len_rtx))
4113 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4115 if (ret)
4117 if (! target)
4119 if (mode != VOIDmode)
4120 target = gen_reg_rtx (mode);
4121 else
4122 target = gen_reg_rtx (GET_MODE (ret));
4124 if (GET_MODE (target) != GET_MODE (ret))
4125 ret = gen_lowpart (GET_MODE (target), ret);
4127 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4128 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4129 gcc_assert (ret);
4131 return target;
4136 return expand_movstr (dst, src, target,
4137 /*retmode=*/ RETURN_END_MINUS_ONE);
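/* Two illustrative cases of the logic above (not from the sources):

     stpcpy (d, s);                // result unused: expands as strcpy
     char *q = stpcpy (d, "hi");   // known length: expands as
                                   // mempcpy (d, "hi", 3) - 1

   In the second case Q points at the copied nul, i.e. d + 2.  */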
4141 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4142 arguments while being careful to avoid duplicate warnings (which could
4143 be issued if the expander were to expand the call, resulting in it
4144 being emitted in expand_call()). */
4146 static rtx
4147 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4149 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4151 /* The call has been successfully expanded. Check for nonstring
4152 arguments and issue warnings as appropriate. */
4153 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4154 return ret;
4157 return NULL_RTX;
4160 /* Check a call EXP to the stpncpy built-in for validity.
4161 Return NULL_RTX on both success and failure. */
4163 static rtx
4164 expand_builtin_stpncpy (tree exp, rtx)
4166 if (!validate_arglist (exp,
4167 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4168 || !warn_stringop_overflow)
4169 return NULL_RTX;
4171 /* The source and destination of the call. */
4172 tree dest = CALL_EXPR_ARG (exp, 0);
4173 tree src = CALL_EXPR_ARG (exp, 1);
4175 /* The exact number of bytes to write (not the maximum). */
4176 tree len = CALL_EXPR_ARG (exp, 2);
4178 /* The size of the destination object. */
4179 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4181 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4183 return NULL_RTX;
4186 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4187 bytes from constant string DATA + OFFSET and return it as target
4188 constant. */
4190 static rtx
4191 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4192 scalar_int_mode mode)
4194 const char *str = (const char *) data;
4196 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4197 return const0_rtx;
4199 return c_readstr (str + offset, mode);
4202 /* Helper to check the sizes of sequences and the destination of calls
4203 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4204 success (no overflow or invalid sizes), false otherwise. */
4206 static bool
4207 check_strncat_sizes (tree exp, tree objsize)
4209 tree dest = CALL_EXPR_ARG (exp, 0);
4210 tree src = CALL_EXPR_ARG (exp, 1);
4211 tree maxread = CALL_EXPR_ARG (exp, 2);
4213 /* Try to determine the range of lengths that the source expression
4214 refers to. */
4215 c_strlen_data lendata = { };
4216 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4218 /* Try to verify that the destination is big enough for the shortest
4219 string. */
4221 if (!objsize && warn_stringop_overflow)
4223 /* If it hasn't been provided by __strncat_chk, try to determine
4224 the size of the destination object into which the source is
4225 being copied. */
4226 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4229 /* Add one for the terminating nul. */
4230 tree srclen = (lendata.minlen
4231 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4232 size_one_node)
4233 : NULL_TREE);
4235 /* The strncat function copies at most MAXREAD bytes and always appends
4236 the terminating nul, so the specified upper bound should never be equal
4237 to (or greater than) the size of the destination. */
4238 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4239 && tree_int_cst_equal (objsize, maxread))
4241 location_t loc = tree_nonartificial_location (exp);
4242 loc = expansion_point_location_if_in_system_header (loc);
4244 warning_at (loc, OPT_Wstringop_overflow_,
4245 "%K%qD specified bound %E equals destination size",
4246 exp, get_callee_fndecl (exp), maxread);
4248 return false;
4251 if (!srclen
4252 || (maxread && tree_fits_uhwi_p (maxread)
4253 && tree_fits_uhwi_p (srclen)
4254 && tree_int_cst_lt (maxread, srclen)))
4255 srclen = maxread;
4257 /* The number of bytes to write is LEN but check_access will also
4258 check SRCLEN if LEN's value isn't known. */
4259 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4260 objsize);
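/* The bound-equals-size case above catches a common misuse (example
   only):

     char d[8] = "";
     strncat (d, s, sizeof d);   // bound 8 equals destination size

   Because strncat appends a nul after at most N copied bytes, a bound
   equal to the destination size can write one byte too many.  */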
4263 /* Similar to expand_builtin_strcat, do some very basic size validation
4264 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4265 the built-in expand to a call to the library function. */
4267 static rtx
4268 expand_builtin_strncat (tree exp, rtx)
4270 if (!validate_arglist (exp,
4271 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4272 || !warn_stringop_overflow)
4273 return NULL_RTX;
4275 tree dest = CALL_EXPR_ARG (exp, 0);
4276 tree src = CALL_EXPR_ARG (exp, 1);
4277 /* The upper bound on the number of bytes to write. */
4278 tree maxread = CALL_EXPR_ARG (exp, 2);
4279 /* The length of the source sequence. */
4280 tree slen = c_strlen (src, 1);
4282 /* Try to determine the range of lengths that the source expression
4283 refers to. Since the lengths are only used for warnings and not
4284 for code generation, disable strict mode below. */
4285 tree maxlen = slen;
4286 if (!maxlen)
4288 c_strlen_data lendata = { };
4289 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4290 maxlen = lendata.maxbound;
4293 /* Try to verify that the destination is big enough for the shortest
4294 string. First try to determine the size of the destination object
4295 into which the source is being copied. */
4296 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4298 /* Add one for the terminating nul. */
4299 tree srclen = (maxlen
4300 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4301 size_one_node)
4302 : NULL_TREE);
4304 /* The strncat function copies at most MAXREAD bytes and always appends
4305 the terminating nul, so the specified upper bound should never be equal
4306 to (or greater than) the size of the destination. */
4307 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4308 && tree_int_cst_equal (destsize, maxread))
4310 location_t loc = tree_nonartificial_location (exp);
4311 loc = expansion_point_location_if_in_system_header (loc);
4313 warning_at (loc, OPT_Wstringop_overflow_,
4314 "%K%qD specified bound %E equals destination size",
4315 exp, get_callee_fndecl (exp), maxread);
4317 return NULL_RTX;
4320 if (!srclen
4321 || (maxread && tree_fits_uhwi_p (maxread)
4322 && tree_fits_uhwi_p (srclen)
4323 && tree_int_cst_lt (maxread, srclen)))
4324 srclen = maxread;
4326 /* The number of bytes to write is SRCLEN. */
4327 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4329 return NULL_RTX;
4332 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4333 NULL_RTX if we failed; the caller should emit a normal call. */
4335 static rtx
4336 expand_builtin_strncpy (tree exp, rtx target)
4338 location_t loc = EXPR_LOCATION (exp);
4340 if (validate_arglist (exp,
4341 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4343 tree dest = CALL_EXPR_ARG (exp, 0);
4344 tree src = CALL_EXPR_ARG (exp, 1);
4345 /* The number of bytes to write (not the maximum). */
4346 tree len = CALL_EXPR_ARG (exp, 2);
4347 /* The length of the source sequence. */
4348 tree slen = c_strlen (src, 1);
4350 if (warn_stringop_overflow)
4352 tree destsize = compute_objsize (dest,
4353 warn_stringop_overflow - 1);
4355 /* The number of bytes to write is LEN but check_access will also
4356 check SLEN if LEN's value isn't known. */
4357 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4358 destsize);
4361 /* We must be passed constant LEN and SRC parameters. */
4362 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4363 return NULL_RTX;
4365 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4367 /* We're required to pad with trailing zeros if the requested
4368 len is greater than strlen(s2)+1. In that case try to
4369 use store_by_pieces; if it fails, punt. */
4370 if (tree_int_cst_lt (slen, len))
4372 unsigned int dest_align = get_pointer_alignment (dest);
4373 const char *p = c_getstr (src);
4374 rtx dest_mem;
4376 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4377 || !can_store_by_pieces (tree_to_uhwi (len),
4378 builtin_strncpy_read_str,
4379 CONST_CAST (char *, p),
4380 dest_align, false))
4381 return NULL_RTX;
4383 dest_mem = get_memory_rtx (dest, len);
4384 store_by_pieces (dest_mem, tree_to_uhwi (len),
4385 builtin_strncpy_read_str,
4386 CONST_CAST (char *, p), dest_align, false,
4387 RETURN_BEGIN);
4388 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4389 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4390 return dest_mem;
4393 return NULL_RTX;
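/* The padding rule handled above, by example (illustrative):

     char d[8];
     strncpy (d, "ab", 8);   // stores 'a', 'b', then six nul bytes

   Since LEN exceeds strlen (SRC) + 1, the whole store can be done by
   pieces from the constant string, with builtin_strncpy_read_str
   supplying zeros past its end.  */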
4396 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4397 bytes from constant string DATA + OFFSET and return it as target
4398 constant. */
4400 static rtx
4401 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4402 scalar_int_mode mode)
4404 const char *c = (const char *) data;
4405 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4407 memset (p, *c, GET_MODE_SIZE (mode));
4409 return c_readstr (p, mode);
4412 /* Callback routine for store_by_pieces. Return the RTL of a register
4413 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4414 char value given in the RTL register data. For example, if mode is
4415 4 bytes wide, return the RTL for 0x01010101*data. */
4417 static rtx
4418 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4419 scalar_int_mode mode)
4421 rtx target, coeff;
4422 size_t size;
4423 char *p;
4425 size = GET_MODE_SIZE (mode);
4426 if (size == 1)
4427 return (rtx) data;
4429 p = XALLOCAVEC (char, size);
4430 memset (p, 1, size);
4431 coeff = c_readstr (p, mode);
4433 target = convert_to_mode (mode, (rtx) data, 1);
4434 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4435 return force_reg (mode, target);
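/* A quick check of the multiplication trick (illustrative): in a
   4-byte mode COEFF is 0x01010101, so a byte value of 0xAB becomes
   0xAB * 0x01010101 == 0xABABABAB, i.e. four consecutive copies of the
   byte, exactly the form store_by_pieces expects.  */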
4438 /* Expand expression EXP, which is a call to the memset builtin. Return
4439 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4440 try to get the result in TARGET, if convenient (and in mode MODE if that's
4441 convenient). */
4443 static rtx
4444 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4446 if (!validate_arglist (exp,
4447 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4448 return NULL_RTX;
4450 tree dest = CALL_EXPR_ARG (exp, 0);
4451 tree val = CALL_EXPR_ARG (exp, 1);
4452 tree len = CALL_EXPR_ARG (exp, 2);
4454 check_memop_access (exp, dest, NULL_TREE, len);
4456 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4459 /* Helper function to do the actual work for expand_builtin_memset. The
4460 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4461 so that this can also be called without constructing an actual CALL_EXPR.
4462 The other arguments and return value are the same as for
4463 expand_builtin_memset. */
4465 static rtx
4466 expand_builtin_memset_args (tree dest, tree val, tree len,
4467 rtx target, machine_mode mode, tree orig_exp)
4469 tree fndecl, fn;
4470 enum built_in_function fcode;
4471 machine_mode val_mode;
4472 char c;
4473 unsigned int dest_align;
4474 rtx dest_mem, dest_addr, len_rtx;
4475 HOST_WIDE_INT expected_size = -1;
4476 unsigned int expected_align = 0;
4477 unsigned HOST_WIDE_INT min_size;
4478 unsigned HOST_WIDE_INT max_size;
4479 unsigned HOST_WIDE_INT probable_max_size;
4481 dest_align = get_pointer_alignment (dest);
4483 /* If DEST is not a pointer type, don't do this operation in-line. */
4484 if (dest_align == 0)
4485 return NULL_RTX;
4487 if (currently_expanding_gimple_stmt)
4488 stringop_block_profile (currently_expanding_gimple_stmt,
4489 &expected_align, &expected_size);
4491 if (expected_align < dest_align)
4492 expected_align = dest_align;
4494 /* If the LEN parameter is zero, return DEST. */
4495 if (integer_zerop (len))
4497 /* Evaluate and ignore VAL in case it has side-effects. */
4498 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4502 /* Stabilize the arguments in case we fail. */
4503 dest = builtin_save_expr (dest);
4504 val = builtin_save_expr (val);
4505 len = builtin_save_expr (len);
4507 len_rtx = expand_normal (len);
4508 determine_block_size (len, len_rtx, &min_size, &max_size,
4509 &probable_max_size);
4510 dest_mem = get_memory_rtx (dest, len);
4511 val_mode = TYPE_MODE (unsigned_char_type_node);
4513 if (TREE_CODE (val) != INTEGER_CST)
4515 rtx val_rtx;
4517 val_rtx = expand_normal (val);
4518 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4520 /* Assume that we can memset by pieces if we can store
4521 the coefficients by pieces (in the required modes).
4522 We can't pass builtin_memset_gen_str as that emits RTL. */
4523 c = 1;
4524 if (tree_fits_uhwi_p (len)
4525 && can_store_by_pieces (tree_to_uhwi (len),
4526 builtin_memset_read_str, &c, dest_align,
4527 true))
4529 val_rtx = force_reg (val_mode, val_rtx);
4530 store_by_pieces (dest_mem, tree_to_uhwi (len),
4531 builtin_memset_gen_str, val_rtx, dest_align,
4532 true, RETURN_BEGIN);
4534 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4535 dest_align, expected_align,
4536 expected_size, min_size, max_size,
4537 probable_max_size))
4538 goto do_libcall;
4540 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4541 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4542 return dest_mem;
4545 if (target_char_cast (val, &c))
4546 goto do_libcall;
4548 if (c)
4550 if (tree_fits_uhwi_p (len)
4551 && can_store_by_pieces (tree_to_uhwi (len),
4552 builtin_memset_read_str, &c, dest_align,
4553 true))
4554 store_by_pieces (dest_mem, tree_to_uhwi (len),
4555 builtin_memset_read_str, &c, dest_align, true,
4556 RETURN_BEGIN);
4557 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4558 gen_int_mode (c, val_mode),
4559 dest_align, expected_align,
4560 expected_size, min_size, max_size,
4561 probable_max_size))
4562 goto do_libcall;
4564 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4565 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4566 return dest_mem;
4569 set_mem_align (dest_mem, dest_align);
4570 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4571 CALL_EXPR_TAILCALL (orig_exp)
4572 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4573 expected_align, expected_size,
4574 min_size, max_size,
4575 probable_max_size);
4577 if (dest_addr == 0)
4579 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4580 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4583 return dest_addr;
4585 do_libcall:
4586 fndecl = get_callee_fndecl (orig_exp);
4587 fcode = DECL_FUNCTION_CODE (fndecl);
4588 if (fcode == BUILT_IN_MEMSET)
4589 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4590 dest, val, len);
4591 else if (fcode == BUILT_IN_BZERO)
4592 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4593 dest, len);
4594 else
4595 gcc_unreachable ();
4596 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4597 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4598 return expand_call (fn, target, target == const0_rtx);
4601 /* Expand expression EXP, which is a call to the bzero builtin. Return
4602 NULL_RTX if we failed; the caller should emit a normal call. */
4604 static rtx
4605 expand_builtin_bzero (tree exp)
4607 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4608 return NULL_RTX;
4610 tree dest = CALL_EXPR_ARG (exp, 0);
4611 tree size = CALL_EXPR_ARG (exp, 1);
4613 check_memop_access (exp, dest, NULL_TREE, size);
4615 /* New argument list transforming bzero(ptr x, int y) to
4616 memset(ptr x, int 0, size_t y). This is done this way
4617 so that if it isn't expanded inline, we fall back to
4618 calling bzero instead of memset. */
4620 location_t loc = EXPR_LOCATION (exp);
4622 return expand_builtin_memset_args (dest, integer_zero_node,
4623 fold_convert_loc (loc,
4624 size_type_node, size),
4625 const0_rtx, VOIDmode, exp);
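/* In effect (illustration only):

     bzero (p, n);

   expands just as memset (p, 0, (size_t) n) would, except that
   ORIG_EXP remains the bzero call, so the do_libcall fallback in
   expand_builtin_memset_args calls bzero itself.  */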
4628 /* Try to expand cmpstr operation ICODE with the given operands.
4629 Return the result rtx on success, otherwise return null. */
4631 static rtx
4632 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4633 HOST_WIDE_INT align)
4635 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4637 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4638 target = NULL_RTX;
4640 struct expand_operand ops[4];
4641 create_output_operand (&ops[0], target, insn_mode);
4642 create_fixed_operand (&ops[1], arg1_rtx);
4643 create_fixed_operand (&ops[2], arg2_rtx);
4644 create_integer_operand (&ops[3], align);
4645 if (maybe_expand_insn (icode, 4, ops))
4646 return ops[0].value;
4647 return NULL_RTX;
4650 /* Expand expression EXP, which is a call to the memcmp built-in function.
4651 Return NULL_RTX if we failed and the caller should emit a normal call,
4652 otherwise try to get the result in TARGET, if convenient.
4653 RESULT_EQ is true if we can relax the returned value to be either zero
4654 or nonzero, without caring about the sign. */
4656 static rtx
4657 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4659 if (!validate_arglist (exp,
4660 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4661 return NULL_RTX;
4663 tree arg1 = CALL_EXPR_ARG (exp, 0);
4664 tree arg2 = CALL_EXPR_ARG (exp, 1);
4665 tree len = CALL_EXPR_ARG (exp, 2);
4666 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4667 bool no_overflow = true;
4669 /* Diagnose calls where the specified length exceeds the size of either
4670 object. */
4671 tree size = compute_objsize (arg1, 0);
4672 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4673 len, /*maxread=*/NULL_TREE, size,
4674 /*objsize=*/NULL_TREE);
4675 if (no_overflow)
4677 size = compute_objsize (arg2, 0);
4678 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4679 len, /*maxread=*/NULL_TREE, size,
4680 /*objsize=*/NULL_TREE);
4683 /* If the specified length exceeds the size of either object,
4684 call the function. */
4685 if (!no_overflow)
4686 return NULL_RTX;
4688 /* Due to the performance benefit, always inline the calls first
4689 when result_eq is false. */
4690 rtx result = NULL_RTX;
4692 if (!result_eq && fcode != BUILT_IN_BCMP)
4694 result = inline_expand_builtin_string_cmp (exp, target);
4695 if (result)
4696 return result;
4699 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4700 location_t loc = EXPR_LOCATION (exp);
4702 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4703 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4705 /* If we don't have POINTER_TYPE, call the function. */
4706 if (arg1_align == 0 || arg2_align == 0)
4707 return NULL_RTX;
4709 rtx arg1_rtx = get_memory_rtx (arg1, len);
4710 rtx arg2_rtx = get_memory_rtx (arg2, len);
4711 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4713 /* Set MEM_SIZE as appropriate. */
4714 if (CONST_INT_P (len_rtx))
4716 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4717 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4720 by_pieces_constfn constfn = NULL;
4722 const char *src_str = c_getstr (arg2);
4723 if (result_eq && src_str == NULL)
4725 src_str = c_getstr (arg1);
4726 if (src_str != NULL)
4727 std::swap (arg1_rtx, arg2_rtx);
4730 /* If SRC is a string constant and block move would be done
4731 by pieces, we can avoid loading the string from memory
4732 and only store the computed constants. */
4733 if (src_str
4734 && CONST_INT_P (len_rtx)
4735 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4736 constfn = builtin_memcpy_read_str;
4738 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4739 TREE_TYPE (len), target,
4740 result_eq, constfn,
4741 CONST_CAST (char *, src_str));
4743 if (result)
4745 /* Return the value in the proper mode for this function. */
4746 if (GET_MODE (result) == mode)
4747 return result;
4749 if (target != 0)
4751 convert_move (target, result, 0);
4752 return target;
4755 return convert_to_mode (mode, result, 0);
4758 return NULL_RTX;
4761 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4762 if we failed; the caller should emit a normal call, otherwise try to get
4763 the result in TARGET, if convenient. */
4765 static rtx
4766 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4768 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4769 return NULL_RTX;
4771 /* Due to the performance benefit, always inline the calls first. */
4772 rtx result = NULL_RTX;
4773 result = inline_expand_builtin_string_cmp (exp, target);
4774 if (result)
4775 return result;
4777 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4778 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4779 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4780 return NULL_RTX;
4782 tree arg1 = CALL_EXPR_ARG (exp, 0);
4783 tree arg2 = CALL_EXPR_ARG (exp, 1);
4785 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4786 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4788 /* If we don't have POINTER_TYPE, call the function. */
4789 if (arg1_align == 0 || arg2_align == 0)
4790 return NULL_RTX;
4792 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4793 arg1 = builtin_save_expr (arg1);
4794 arg2 = builtin_save_expr (arg2);
4796 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4797 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4799 /* Try to call cmpstrsi. */
4800 if (cmpstr_icode != CODE_FOR_nothing)
4801 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4802 MIN (arg1_align, arg2_align));
4804 /* Try to determine at least one length and call cmpstrnsi. */
4805 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4807 tree len;
4808 rtx arg3_rtx;
4810 tree len1 = c_strlen (arg1, 1);
4811 tree len2 = c_strlen (arg2, 1);
4813 if (len1)
4814 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4815 if (len2)
4816 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4818 /* If we don't have a constant length for the first, use the length
4819 of the second, if we know it. We don't require a constant for
4820 this case; some cost analysis could be done if both are available
4821 but neither is constant. For now, assume they're equally cheap,
4822 unless one has side effects. If both strings have constant lengths,
4823 use the smaller. */
4825 if (!len1)
4826 len = len2;
4827 else if (!len2)
4828 len = len1;
4829 else if (TREE_SIDE_EFFECTS (len1))
4830 len = len2;
4831 else if (TREE_SIDE_EFFECTS (len2))
4832 len = len1;
4833 else if (TREE_CODE (len1) != INTEGER_CST)
4834 len = len2;
4835 else if (TREE_CODE (len2) != INTEGER_CST)
4836 len = len1;
4837 else if (tree_int_cst_lt (len1, len2))
4838 len = len1;
4839 else
4840 len = len2;
4842 /* If both arguments have side effects, we cannot optimize. */
4843 if (len && !TREE_SIDE_EFFECTS (len))
4845 arg3_rtx = expand_normal (len);
4846 result = expand_cmpstrn_or_cmpmem
4847 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4848 arg3_rtx, MIN (arg1_align, arg2_align));
4852 tree fndecl = get_callee_fndecl (exp);
4853 if (result)
4855 /* Check to see if the argument was declared attribute nonstring
4856 and if so, issue a warning since at this point it's not known
4857 to be nul-terminated. */
4858 maybe_warn_nonstring_arg (fndecl, exp);
4860 /* Return the value in the proper mode for this function. */
4861 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4862 if (GET_MODE (result) == mode)
4863 return result;
4864 if (target == 0)
4865 return convert_to_mode (mode, result, 0);
4866 convert_move (target, result, 0);
4867 return target;
4870 /* Expand the library call ourselves using a stabilized argument
4871 list to avoid re-evaluating the function's arguments twice. */
4872 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4873 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4874 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4875 return expand_call (fn, target, target == const0_rtx);
4878 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4879 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4880 the result in TARGET, if convenient. */
4882 static rtx
4883 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4884 ATTRIBUTE_UNUSED machine_mode mode)
4886 if (!validate_arglist (exp,
4887 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4888 return NULL_RTX;
4890 /* Due to the performance benefit, always inline the calls first. */
4891 rtx result = NULL_RTX;
4892 result = inline_expand_builtin_string_cmp (exp, target);
4893 if (result)
4894 return result;
4896 /* If c_strlen can determine an expression for one of the string
4897 lengths, and it doesn't have side effects, then emit cmpstrnsi
4898 using length MIN(strlen(string)+1, arg3). */
4899 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4900 if (cmpstrn_icode == CODE_FOR_nothing)
4901 return NULL_RTX;
4903 tree len;
4905 tree arg1 = CALL_EXPR_ARG (exp, 0);
4906 tree arg2 = CALL_EXPR_ARG (exp, 1);
4907 tree arg3 = CALL_EXPR_ARG (exp, 2);
4909 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4910 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4912 tree len1 = c_strlen (arg1, 1);
4913 tree len2 = c_strlen (arg2, 1);
4915 location_t loc = EXPR_LOCATION (exp);
4917 if (len1)
4918 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4919 if (len2)
4920 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4922 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4924 /* If we don't have a constant length for the first, use the length
4925 of the second, if we know it. If neither string is constant length,
4926 use the given length argument. We don't require a constant for
4927 this case; some cost analysis could be done if both are available
4928 but neither is constant. For now, assume they're equally cheap,
4929 unless one has side effects. If both strings have constant lengths,
4930 use the smaller. */
4932 if (!len1 && !len2)
4933 len = len3;
4934 else if (!len1)
4935 len = len2;
4936 else if (!len2)
4937 len = len1;
4938 else if (TREE_SIDE_EFFECTS (len1))
4939 len = len2;
4940 else if (TREE_SIDE_EFFECTS (len2))
4941 len = len1;
4942 else if (TREE_CODE (len1) != INTEGER_CST)
4943 len = len2;
4944 else if (TREE_CODE (len2) != INTEGER_CST)
4945 len = len1;
4946 else if (tree_int_cst_lt (len1, len2))
4947 len = len1;
4948 else
4949 len = len2;
4951 /* If we are not using the given length, we must incorporate it here.
4952 The actual new length parameter will be MIN(len,arg3) in this case. */
4953 if (len != len3)
4955 len = fold_convert_loc (loc, sizetype, len);
4956 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4958 rtx arg1_rtx = get_memory_rtx (arg1, len);
4959 rtx arg2_rtx = get_memory_rtx (arg2, len);
4960 rtx arg3_rtx = expand_normal (len);
4961 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4962 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4963 MIN (arg1_align, arg2_align));
4965 tree fndecl = get_callee_fndecl (exp);
4966 if (result)
4968 /* Check to see if the argument was declared attribute nonstring
4969 and if so, issue a warning since at this point it's not known
4970 to be nul-terminated. */
4971 maybe_warn_nonstring_arg (fndecl, exp);
4973 /* Return the value in the proper mode for this function. */
4974 mode = TYPE_MODE (TREE_TYPE (exp));
4975 if (GET_MODE (result) == mode)
4976 return result;
4977 if (target == 0)
4978 return convert_to_mode (mode, result, 0);
4979 convert_move (target, result, 0);
4980 return target;
4983 /* Expand the library call ourselves using a stabilized argument
4984 list to avoid re-evaluating the function's arguments twice. */
4985 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4986 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4987 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4988 return expand_call (fn, target, target == const0_rtx);
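/* A worked example of the length selection above (illustrative): for
   strncmp (s, "ab", 16), LEN2 is 3 counting the nul, so the emitted
   comparison length is MIN (3, 16) == 3; bytes past the first nul
   cannot change the result, so the smaller bound is safe.  */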
4991 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4992 if that's convenient. */
4994 rtx
4995 expand_builtin_saveregs (void)
4997 rtx val;
4998 rtx_insn *seq;
5000 /* Don't do __builtin_saveregs more than once in a function.
5001 Save the result of the first call and reuse it. */
5002 if (saveregs_value != 0)
5003 return saveregs_value;
5005 /* When this function is called, it means that registers must be
5006 saved on entry to this function. So we migrate the call to the
5007 first insn of this function. */
5009 start_sequence ();
5011 /* Do whatever the machine needs done in this case. */
5012 val = targetm.calls.expand_builtin_saveregs ();
5014 seq = get_insns ();
5015 end_sequence ();
5017 saveregs_value = val;
5019 /* Put the insns after the NOTE that starts the function. If this
5020 is inside a start_sequence, make the outer-level insn chain current, so
5021 the code is placed at the start of the function. */
5022 push_topmost_sequence ();
5023 emit_insn_after (seq, entry_of_function ());
5024 pop_topmost_sequence ();
5026 return val;
5029 /* Expand a call to __builtin_next_arg. */
5031 static rtx
5032 expand_builtin_next_arg (void)
5034 /* Checking arguments is already done in fold_builtin_next_arg,
5035 which must be called before this function. */
5036 return expand_binop (ptr_mode, add_optab,
5037 crtl->args.internal_arg_pointer,
5038 crtl->args.arg_offset_rtx,
5039 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5042 /* Make it easier for the backends by protecting the valist argument
5043 from multiple evaluations. */
5045 static tree
5046 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5048 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5050 /* The current way of determining the type of valist is completely
5051 bogus. We should have the information on the va builtin instead. */
5052 if (!vatype)
5053 vatype = targetm.fn_abi_va_list (cfun->decl);
5055 if (TREE_CODE (vatype) == ARRAY_TYPE)
5057 if (TREE_SIDE_EFFECTS (valist))
5058 valist = save_expr (valist);
5060 /* For this case, the backends will be expecting a pointer to
5061 vatype, but it's possible we've actually been given an array
5062 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5063 So fix it. */
5064 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5066 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5067 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5070 else
5072 tree pt = build_pointer_type (vatype);
5074 if (! needs_lvalue)
5076 if (! TREE_SIDE_EFFECTS (valist))
5077 return valist;
5079 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5080 TREE_SIDE_EFFECTS (valist) = 1;
5083 if (TREE_SIDE_EFFECTS (valist))
5084 valist = save_expr (valist);
5085 valist = fold_build2_loc (loc, MEM_REF,
5086 vatype, valist, build_int_cst (pt, 0));
5089 return valist;
5092 /* The "standard" definition of va_list is void*. */
5094 tree
5095 std_build_builtin_va_list (void)
5097 return ptr_type_node;
5100 /* The "standard" abi va_list is va_list_type_node. */
5102 tree
5103 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5105 return va_list_type_node;
5108 /* The "standard" type of va_list is va_list_type_node. */
5110 tree
5111 std_canonical_va_list_type (tree type)
5113 tree wtype, htype;
5115 wtype = va_list_type_node;
5116 htype = type;
5118 if (TREE_CODE (wtype) == ARRAY_TYPE)
5120 /* If va_list is an array type, the argument may have decayed
5121 to a pointer type, e.g. by being passed to another function.
5122 In that case, unwrap both types so that we can compare the
5123 underlying records. */
5124 if (TREE_CODE (htype) == ARRAY_TYPE
5125 || POINTER_TYPE_P (htype))
5127 wtype = TREE_TYPE (wtype);
5128 htype = TREE_TYPE (htype);
5131 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5132 return va_list_type_node;
5134 return NULL_TREE;
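/* An illustrative case for the unwrapping above: on targets where

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list passed on to another function decays to a pointer, so
   both the array type and the decayed pointer type are stripped down
   to the underlying record before the main variants are compared.  */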
5137 /* The "standard" implementation of va_start: just assign `nextarg' to
5138 the variable. */
5140 void
5141 std_expand_builtin_va_start (tree valist, rtx nextarg)
5143 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5144 convert_move (va_r, nextarg, 0);
5147 /* Expand EXP, a call to __builtin_va_start. */
5149 static rtx
5150 expand_builtin_va_start (tree exp)
5152 rtx nextarg;
5153 tree valist;
5154 location_t loc = EXPR_LOCATION (exp);
5156 if (call_expr_nargs (exp) < 2)
5158 error_at (loc, "too few arguments to function %<va_start%>");
5159 return const0_rtx;
5162 if (fold_builtin_next_arg (exp, true))
5163 return const0_rtx;
5165 nextarg = expand_builtin_next_arg ();
5166 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5168 if (targetm.expand_builtin_va_start)
5169 targetm.expand_builtin_va_start (valist, nextarg);
5170 else
5171 std_expand_builtin_va_start (valist, nextarg);
5173 return const0_rtx;
5176 /* Expand EXP, a call to __builtin_va_end. */
5178 static rtx
5179 expand_builtin_va_end (tree exp)
5181 tree valist = CALL_EXPR_ARG (exp, 0);
5183 /* Evaluate for side effects, if needed. I hate macros that don't
5184 do that. */
5185 if (TREE_SIDE_EFFECTS (valist))
5186 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5188 return const0_rtx;
5191 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5192 builtin rather than just as an assignment in stdarg.h because of the
5193 nastiness of array-type va_list types. */
5195 static rtx
5196 expand_builtin_va_copy (tree exp)
5198 tree dst, src, t;
5199 location_t loc = EXPR_LOCATION (exp);
5201 dst = CALL_EXPR_ARG (exp, 0);
5202 src = CALL_EXPR_ARG (exp, 1);
5204 dst = stabilize_va_list_loc (loc, dst, 1);
5205 src = stabilize_va_list_loc (loc, src, 0);
5207 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5209 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5211 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5212 TREE_SIDE_EFFECTS (t) = 1;
5213 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5215 else
5217 rtx dstb, srcb, size;
5219 /* Evaluate to pointers. */
5220 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5221 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5222 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5223 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5225 dstb = convert_memory_address (Pmode, dstb);
5226 srcb = convert_memory_address (Pmode, srcb);
5228 /* "Dereference" to BLKmode memories. */
5229 dstb = gen_rtx_MEM (BLKmode, dstb);
5230 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5231 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5232 srcb = gen_rtx_MEM (BLKmode, srcb);
5233 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5234 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5236 /* Copy. */
5237 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5240 return const0_rtx;
5243 /* Expand a call to one of the builtin functions __builtin_frame_address or
5244 __builtin_return_address. */
5246 static rtx
5247 expand_builtin_frame_address (tree fndecl, tree exp)
5249 /* The argument must be a nonnegative integer constant.
5250 It counts the number of frames to scan up the stack.
5251 The value is either the frame pointer value or the return
5252 address saved in that frame. */
5253 if (call_expr_nargs (exp) == 0)
5254 /* Warning about missing arg was already issued. */
5255 return const0_rtx;
5256 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5258 error ("invalid argument to %qD", fndecl);
5259 return const0_rtx;
5261 else
5263 /* Number of frames to scan up the stack. */
5264 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5266 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5268 /* Some ports cannot access arbitrary stack frames. */
5269 if (tem == NULL)
5271 warning (0, "unsupported argument to %qD", fndecl);
5272 return const0_rtx;
5275 if (count)
5277 /* Warn since no effort is made to ensure that any frame
5278 beyond the current one exists or can be safely reached. */
5279 warning (OPT_Wframe_address, "calling %qD with "
5280 "a nonzero argument is unsafe", fndecl);
5283 /* For __builtin_frame_address, return what we've got. */
5284 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5285 return tem;
5287 if (!REG_P (tem)
5288 && ! CONSTANT_P (tem))
5289 tem = copy_addr_to_reg (tem);
5290 return tem;
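/* Illustration only: calls handled by the expander above.  A nonzero
   frame count triggers the -Wframe-address warning emitted above.

     void *fp = __builtin_frame_address (0);    // this frame
     void *ra = __builtin_return_address (0);   // caller's resume address
     void *up = __builtin_frame_address (1);    // warns: may be unsafe
*/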
5294 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5295 failed and the caller should emit a normal call. */
5297 static rtx
5298 expand_builtin_alloca (tree exp)
5300 rtx op0;
5301 rtx result;
5302 unsigned int align;
5303 tree fndecl = get_callee_fndecl (exp);
5304 HOST_WIDE_INT max_size;
5305 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5306 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5307 bool valid_arglist
5308 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5309 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5310 VOID_TYPE)
5311 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5312 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5313 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5315 if (!valid_arglist)
5316 return NULL_RTX;
5318 if ((alloca_for_var
5319 && warn_vla_limit >= HOST_WIDE_INT_MAX
5320 && warn_alloc_size_limit < warn_vla_limit)
5321 || (!alloca_for_var
5322 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5323 && warn_alloc_size_limit < warn_alloca_limit
5326 /* -Walloca-larger-than and -Wvla-larger-than settings of
5327 less than HOST_WIDE_INT_MAX override the more general
5328 -Walloc-size-larger-than so unless either of the former
5329 options is smaller than the last one (which would imply

5330 that the call was already checked), check the alloca
5331 arguments for overflow. */
5332 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5333 int idx[] = { 0, -1 };
5334 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5337 /* Compute the argument. */
5338 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5340 /* Compute the alignment. */
5341 align = (fcode == BUILT_IN_ALLOCA
5342 ? BIGGEST_ALIGNMENT
5343 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5345 /* Compute the maximum size. */
5346 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5347 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5348 : -1);
5350 /* Allocate the desired space. If the allocation stems from the declaration
5351 of a variable-sized object, it cannot accumulate. */
5352 result
5353 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5354 result = convert_memory_address (ptr_mode, result);
5356 return result;
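/* Illustration only: the three forms dispatched above.  The *_with_align
   variants are mostly generated internally (e.g. for VLAs); their
   alignment argument is a constant number of bits.

     char *p = __builtin_alloca (n);
     char *q = __builtin_alloca_with_align (n, 128);   // 16-byte alignment
*/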
5359 /* Emit a call to __asan_allocas_unpoison for EXP. Add
5360 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5361 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See the
5362 motivation for this in the comment for handle_builtin_stack_restore. */
5364 static rtx
5365 expand_asan_emit_allocas_unpoison (tree exp)
5367 tree arg0 = CALL_EXPR_ARG (exp, 0);
5368 tree arg1 = CALL_EXPR_ARG (exp, 1);
5369 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5370 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5371 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5372 stack_pointer_rtx, NULL_RTX, 0,
5373 OPTAB_LIB_WIDEN);
5374 off = convert_modes (ptr_mode, Pmode, off, 0);
5375 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5376 OPTAB_LIB_WIDEN);
5377 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5378 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5379 top, ptr_mode, bot, ptr_mode);
5380 return ret;
5383 /* Expand a call to bswap builtin in EXP.
5384 Return NULL_RTX if a normal call should be emitted rather than expanding the
5385 function in-line. If convenient, the result should be placed in TARGET.
5386 SUBTARGET may be used as the target for computing one of EXP's operands. */
5388 static rtx
5389 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5390 rtx subtarget)
5392 tree arg;
5393 rtx op0;
5395 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5396 return NULL_RTX;
5398 arg = CALL_EXPR_ARG (exp, 0);
5399 op0 = expand_expr (arg,
5400 subtarget && GET_MODE (subtarget) == target_mode
5401 ? subtarget : NULL_RTX,
5402 target_mode, EXPAND_NORMAL);
5403 if (GET_MODE (op0) != target_mode)
5404 op0 = convert_to_mode (target_mode, op0, 1);
5406 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5408 gcc_assert (target);
5410 return convert_to_mode (target_mode, target, 1);
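/* Illustration only: the values the bswap expansion computes.

     __builtin_bswap16 (0x1122)     == 0x2211
     __builtin_bswap32 (0x11223344) == 0x44332211
*/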
5413 /* Expand a call to a unary builtin in EXP.
5414 Return NULL_RTX if a normal call should be emitted rather than expanding the
5415 function in-line. If convenient, the result should be placed in TARGET.
5416 SUBTARGET may be used as the target for computing one of EXP's operands. */
5418 static rtx
5419 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5420 rtx subtarget, optab op_optab)
5422 rtx op0;
5424 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5425 return NULL_RTX;
5427 /* Compute the argument. */
5428 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5429 (subtarget
5430 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5431 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5432 VOIDmode, EXPAND_NORMAL);
5433 /* Compute op, into TARGET if possible.
5434 Set TARGET to wherever the result comes back. */
5435 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5436 op_optab, op0, target, op_optab != clrsb_optab);
5437 gcc_assert (target);
5439 return convert_to_mode (target_mode, target, 0);
5442 /* Expand a call to __builtin_expect. We just return our argument
5443 as the builtin_expect semantics should already have been handled by
5444 the tree branch prediction pass. */
5446 static rtx
5447 expand_builtin_expect (tree exp, rtx target)
5449 tree arg;
5451 if (call_expr_nargs (exp) < 2)
5452 return const0_rtx;
5453 arg = CALL_EXPR_ARG (exp, 0);
5455 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5456 /* When guessing was done, the hints should be already stripped away. */
5457 gcc_assert (!flag_guess_branch_prob
5458 || optimize == 0 || seen_error ());
5459 return target;
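/* Illustration only: by the time we expand, the hint below has already
   been consumed by branch prediction; the call simply yields its first
   argument.

     if (__builtin_expect (x == 0, 1))   // "x == 0 is likely"
       hot_path ();
*/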
5462 /* Expand a call to __builtin_expect_with_probability. We just return our
5463 argument as the builtin_expect semantics should already have been
5464 handled by the tree branch prediction pass. */
5466 static rtx
5467 expand_builtin_expect_with_probability (tree exp, rtx target)
5469 tree arg;
5471 if (call_expr_nargs (exp) < 3)
5472 return const0_rtx;
5473 arg = CALL_EXPR_ARG (exp, 0);
5475 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5476 /* When guessing was done, the hints should be already stripped away. */
5477 gcc_assert (!flag_guess_branch_prob
5478 || optimize == 0 || seen_error ());
5479 return target;
5483 /* Expand a call to __builtin_assume_aligned. We just return our first
5484 argument as the builtin_assume_aligned semantics should already have
5485 been handled by CCP. */
5487 static rtx
5488 expand_builtin_assume_aligned (tree exp, rtx target)
5490 if (call_expr_nargs (exp) < 2)
5491 return const0_rtx;
5492 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5493 EXPAND_NORMAL);
5494 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5495 && (call_expr_nargs (exp) < 3
5496 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5497 return target;
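/* Illustration only: typical uses whose alignment facts CCP has already
   exploited by this point; here we just return the first argument.

     p = __builtin_assume_aligned (buf, 64);       // 64-byte aligned
     q = __builtin_assume_aligned (buf, 64, 16);   // aligned modulo offset 16
*/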
5500 void
5501 expand_builtin_trap (void)
5503 if (targetm.have_trap ())
5505 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5506 /* For trap insns when not accumulating outgoing args force
5507 REG_ARGS_SIZE note to prevent crossjumping of calls with
5508 different args sizes. */
5509 if (!ACCUMULATE_OUTGOING_ARGS)
5510 add_args_size_note (insn, stack_pointer_delta);
5512 else
5514 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5515 tree call_expr = build_call_expr (fn, 0);
5516 expand_call (call_expr, NULL_RTX, false);
5519 emit_barrier ();
5522 /* Expand a call to __builtin_unreachable. We do nothing except emit
5523 a barrier saying that control flow will not pass here.
5525 It is the responsibility of the program being compiled to ensure
5526 that control flow never reaches __builtin_unreachable. */
5527 static void
5528 expand_builtin_unreachable (void)
5530 emit_barrier ();
5533 /* Expand EXP, a call to fabs, fabsf or fabsl.
5534 Return NULL_RTX if a normal call should be emitted rather than expanding
5535 the function inline. If convenient, the result should be placed
5536 in TARGET. SUBTARGET may be used as the target for computing
5537 the operand. */
5539 static rtx
5540 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5542 machine_mode mode;
5543 tree arg;
5544 rtx op0;
5546 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5547 return NULL_RTX;
5549 arg = CALL_EXPR_ARG (exp, 0);
5550 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5551 mode = TYPE_MODE (TREE_TYPE (arg));
5552 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5553 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5556 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5557 Return NULL if a normal call should be emitted rather than expanding the
5558 function inline. If convenient, the result should be placed in TARGET.
5559 SUBTARGET may be used as the target for computing the operand. */
5561 static rtx
5562 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5564 rtx op0, op1;
5565 tree arg;
5567 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5568 return NULL_RTX;
5570 arg = CALL_EXPR_ARG (exp, 0);
5571 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5573 arg = CALL_EXPR_ARG (exp, 1);
5574 op1 = expand_normal (arg);
5576 return expand_copysign (op0, op1, target);
5579 /* Expand a call to __builtin___clear_cache. */
5581 static rtx
5582 expand_builtin___clear_cache (tree exp)
5584 if (!targetm.code_for_clear_cache)
5586 #ifdef CLEAR_INSN_CACHE
5587 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5588 does something. Just do the default expansion to a call to
5589 __clear_cache(). */
5590 return NULL_RTX;
5591 #else
5592 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5593 does nothing. There is no need to call it. Do nothing. */
5594 return const0_rtx;
5595 #endif /* CLEAR_INSN_CACHE */
5598 /* We have a "clear_cache" insn, and it will handle everything. */
5599 tree begin, end;
5600 rtx begin_rtx, end_rtx;
5602 /* We must not expand to a library call. If we did, any
5603 fallback library function in libgcc that might contain a call to
5604 __builtin___clear_cache() would recurse infinitely. */
5605 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5607 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5608 return const0_rtx;
5611 if (targetm.have_clear_cache ())
5613 struct expand_operand ops[2];
5615 begin = CALL_EXPR_ARG (exp, 0);
5616 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5618 end = CALL_EXPR_ARG (exp, 1);
5619 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5621 create_address_operand (&ops[0], begin_rtx);
5622 create_address_operand (&ops[1], end_rtx);
5623 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5624 return const0_rtx;
5626 return const0_rtx;
5629 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5631 static rtx
5632 round_trampoline_addr (rtx tramp)
5634 rtx temp, addend, mask;
5636 /* If we don't need too much alignment, we'll have been guaranteed
5637 proper alignment by get_trampoline_type. */
5638 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5639 return tramp;
5641 /* Round address up to desired boundary. */
5642 temp = gen_reg_rtx (Pmode);
5643 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5644 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5646 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5647 temp, 0, OPTAB_LIB_WIDEN);
5648 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5649 temp, 0, OPTAB_LIB_WIDEN);
5651 return tramp;
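/* Illustration only: the add-and-mask idiom above rounds up to a
   power-of-two boundary; e.g. with a 16-byte TRAMPOLINE_ALIGNMENT:

     rounded = (addr + 15) & -16;   // -16 == ~15 in two's complement
*/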
5654 static rtx
5655 expand_builtin_init_trampoline (tree exp, bool onstack)
5657 tree t_tramp, t_func, t_chain;
5658 rtx m_tramp, r_tramp, r_chain, tmp;
5660 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5661 POINTER_TYPE, VOID_TYPE))
5662 return NULL_RTX;
5664 t_tramp = CALL_EXPR_ARG (exp, 0);
5665 t_func = CALL_EXPR_ARG (exp, 1);
5666 t_chain = CALL_EXPR_ARG (exp, 2);
5668 r_tramp = expand_normal (t_tramp);
5669 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5670 MEM_NOTRAP_P (m_tramp) = 1;
5672 /* If ONSTACK, the TRAMP argument should be the address of a field
5673 within the local function's FRAME decl. Either way, let's see if
5674 we can fill in the MEM_ATTRs for this memory. */
5675 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5676 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5678 /* Creator of a heap trampoline is responsible for making sure the
5679 address is aligned to at least STACK_BOUNDARY. Normally malloc
5680 will ensure this anyhow. */
5681 tmp = round_trampoline_addr (r_tramp);
5682 if (tmp != r_tramp)
5684 m_tramp = change_address (m_tramp, BLKmode, tmp);
5685 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5686 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5689 /* The FUNC argument should be the address of the nested function.
5690 Extract the actual function decl to pass to the hook. */
5691 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5692 t_func = TREE_OPERAND (t_func, 0);
5693 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5695 r_chain = expand_normal (t_chain);
5697 /* Generate insns to initialize the trampoline. */
5698 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5700 if (onstack)
5702 trampolines_created = 1;
5704 if (targetm.calls.custom_function_descriptors != 0)
5705 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5706 "trampoline generated for nested function %qD", t_func);
5709 return const0_rtx;
5712 static rtx
5713 expand_builtin_adjust_trampoline (tree exp)
5715 rtx tramp;
5717 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5718 return NULL_RTX;
5720 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5721 tramp = round_trampoline_addr (tramp);
5722 if (targetm.calls.trampoline_adjust_address)
5723 tramp = targetm.calls.trampoline_adjust_address (tramp);
5725 return tramp;
5728 /* Expand a call to the builtin descriptor initialization routine.
5729 A descriptor is made up of a pair of pointers: to the static
5730 chain and to the code entry, in that order. */
5732 static rtx
5733 expand_builtin_init_descriptor (tree exp)
5735 tree t_descr, t_func, t_chain;
5736 rtx m_descr, r_descr, r_func, r_chain;
5738 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5739 VOID_TYPE))
5740 return NULL_RTX;
5742 t_descr = CALL_EXPR_ARG (exp, 0);
5743 t_func = CALL_EXPR_ARG (exp, 1);
5744 t_chain = CALL_EXPR_ARG (exp, 2);
5746 r_descr = expand_normal (t_descr);
5747 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5748 MEM_NOTRAP_P (m_descr) = 1;
5750 r_func = expand_normal (t_func);
5751 r_chain = expand_normal (t_chain);
5753 /* Generate insns to initialize the descriptor. */
5754 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5755 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5756 POINTER_SIZE / BITS_PER_UNIT), r_func);
5758 return const0_rtx;
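/* Illustration only: a hypothetical C rendering of the descriptor
   initialized above, two consecutive pointer-sized words.

     struct descriptor
     {
       void *static_chain;   // at offset 0
       void *code_entry;     // at offset POINTER_SIZE / BITS_PER_UNIT
     };
*/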
5761 /* Expand a call to the builtin descriptor adjustment routine. */
5763 static rtx
5764 expand_builtin_adjust_descriptor (tree exp)
5766 rtx tramp;
5768 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5769 return NULL_RTX;
5771 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5773 /* Unalign the descriptor to allow runtime identification. */
5774 tramp = plus_constant (ptr_mode, tramp,
5775 targetm.calls.custom_function_descriptors);
5777 return force_operand (tramp, NULL_RTX);
5780 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5781 function. The function first checks whether the back end provides
5782 an insn to implement signbit for the respective mode. If not, it
5783 checks whether the floating point format of the value is such that
5784 the sign bit can be extracted. If that is not the case, error out.
5785 EXP is the expression that is a call to the builtin function; if
5786 convenient, the result should be placed in TARGET. */
5787 static rtx
5788 expand_builtin_signbit (tree exp, rtx target)
5790 const struct real_format *fmt;
5791 scalar_float_mode fmode;
5792 scalar_int_mode rmode, imode;
5793 tree arg;
5794 int word, bitpos;
5795 enum insn_code icode;
5796 rtx temp;
5797 location_t loc = EXPR_LOCATION (exp);
5799 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5800 return NULL_RTX;
5802 arg = CALL_EXPR_ARG (exp, 0);
5803 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5804 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5805 fmt = REAL_MODE_FORMAT (fmode);
5807 arg = builtin_save_expr (arg);
5809 /* Expand the argument yielding a RTX expression. */
5810 temp = expand_normal (arg);
5812 /* Check if the back end provides an insn that handles signbit for the
5813 argument's mode. */
5814 icode = optab_handler (signbit_optab, fmode);
5815 if (icode != CODE_FOR_nothing)
5817 rtx_insn *last = get_last_insn ();
5818 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5819 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5820 return target;
5821 delete_insns_since (last);
5824 /* For floating point formats without a sign bit, implement signbit
5825 as "ARG < 0.0". */
5826 bitpos = fmt->signbit_ro;
5827 if (bitpos < 0)
5829 /* But we can't do this if the format supports signed zero. */
5830 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5832 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5833 build_real (TREE_TYPE (arg), dconst0));
5834 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5837 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5839 imode = int_mode_for_mode (fmode).require ();
5840 temp = gen_lowpart (imode, temp);
5842 else
5844 imode = word_mode;
5845 /* Handle targets with different FP word orders. */
5846 if (FLOAT_WORDS_BIG_ENDIAN)
5847 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5848 else
5849 word = bitpos / BITS_PER_WORD;
5850 temp = operand_subword_force (temp, word, fmode);
5851 bitpos = bitpos % BITS_PER_WORD;
5854 /* Force the intermediate word_mode (or narrower) result into a
5855 register. This avoids attempting to create paradoxical SUBREGs
5856 of floating point modes below. */
5857 temp = force_reg (imode, temp);
5859 /* If the bitpos is within the "result mode" lowpart, the operation
5860 can be implemented with a single bitwise AND. Otherwise, we need
5861 a right shift and an AND. */
5863 if (bitpos < GET_MODE_BITSIZE (rmode))
5865 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5867 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5868 temp = gen_lowpart (rmode, temp);
5869 temp = expand_binop (rmode, and_optab, temp,
5870 immed_wide_int_const (mask, rmode),
5871 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5873 else
5875 /* Perform a logical right shift to place the signbit in the least
5876 significant bit, then truncate the result to the desired mode
5877 and mask just this bit. */
5878 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5879 temp = gen_lowpart (rmode, temp);
5880 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5881 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5884 return temp;
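/* Illustration only: the two mask strategies above, for IEEE single
   (sign in bit 31) and a 32-bit int result.  When the sign bit lies
   within the result mode's lowpart, one AND suffices (signbit need
   only be nonzero); otherwise shift first:

     r = bits & 0x80000000u;        // bitpos < result width
     r = (bits >> bitpos) & 1;      // bitpos outside the lowpart
*/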
5887 /* Expand fork or exec calls. TARGET is the desired target of the
5888 call. EXP is the call. FN is the identifier of the
5889 actual function. IGNORE is nonzero if the
5890 value is to be ignored. */
5892 static rtx
5893 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5895 tree id, decl;
5896 tree call;
5898 /* If we are not profiling, just call the function. */
5899 if (!profile_arc_flag)
5900 return NULL_RTX;
5902 /* Otherwise call the wrapper. This should be equivalent for the rest of
5903 compiler, so the code does not diverge, and the wrapper may run the
5904 code necessary for keeping the profiling sane. */
5906 switch (DECL_FUNCTION_CODE (fn))
5908 case BUILT_IN_FORK:
5909 id = get_identifier ("__gcov_fork");
5910 break;
5912 case BUILT_IN_EXECL:
5913 id = get_identifier ("__gcov_execl");
5914 break;
5916 case BUILT_IN_EXECV:
5917 id = get_identifier ("__gcov_execv");
5918 break;
5920 case BUILT_IN_EXECLP:
5921 id = get_identifier ("__gcov_execlp");
5922 break;
5924 case BUILT_IN_EXECLE:
5925 id = get_identifier ("__gcov_execle");
5926 break;
5928 case BUILT_IN_EXECVP:
5929 id = get_identifier ("__gcov_execvp");
5930 break;
5932 case BUILT_IN_EXECVE:
5933 id = get_identifier ("__gcov_execve");
5934 break;
5936 default:
5937 gcc_unreachable ();
5940 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5941 FUNCTION_DECL, id, TREE_TYPE (fn));
5942 DECL_EXTERNAL (decl) = 1;
5943 TREE_PUBLIC (decl) = 1;
5944 DECL_ARTIFICIAL (decl) = 1;
5945 TREE_NOTHROW (decl) = 1;
5946 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5947 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5948 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5949 return expand_call (call, target, ignore);
5954 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5955 the pointer in these functions is void*, the tree optimizers may remove
5956 casts. The mode computed in expand_builtin isn't reliable either, due
5957 to __sync_bool_compare_and_swap.
5959 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5960 group of builtins. This gives us log2 of the mode size. */
5962 static inline machine_mode
5963 get_builtin_sync_mode (int fcode_diff)
5965 /* The size is not negotiable, so ask not to get BLKmode in return
5966 if the target indicates that a smaller size would be better. */
5967 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
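/* Illustration only: FCODE_DIFF is log2 of the access size; e.g.
   __sync_fetch_and_add_4 gives fcode_diff == 2, and on a typical
   target BITS_PER_UNIT << 2 == 32, yielding the 32-bit integer mode. */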
5970 /* Expand the memory expression LOC and return the appropriate memory operand
5971 for the builtin_sync operations. */
5973 static rtx
5974 get_builtin_sync_mem (tree loc, machine_mode mode)
5976 rtx addr, mem;
5977 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5978 ? TREE_TYPE (TREE_TYPE (loc))
5979 : TREE_TYPE (loc));
5980 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5982 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5983 addr = convert_memory_address (addr_mode, addr);
5985 /* Note that we explicitly do not want any alias information for this
5986 memory, so that we kill all other live memories. Otherwise we don't
5987 satisfy the full barrier semantics of the intrinsic. */
5988 mem = gen_rtx_MEM (mode, addr);
5990 set_mem_addr_space (mem, addr_space);
5992 mem = validize_mem (mem);
5994 /* The alignment needs to be at least that of the mode. */
5995 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5996 get_pointer_alignment (loc)));
5997 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5998 MEM_VOLATILE_P (mem) = 1;
6000 return mem;
6003 /* Make sure an argument is in the right mode.
6004 EXP is the tree argument.
6005 MODE is the mode it should be in. */
6007 static rtx
6008 expand_expr_force_mode (tree exp, machine_mode mode)
6010 rtx val;
6011 machine_mode old_mode;
6013 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6014 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6015 of CONST_INTs, where we know the old_mode only from the call argument. */
6017 old_mode = GET_MODE (val);
6018 if (old_mode == VOIDmode)
6019 old_mode = TYPE_MODE (TREE_TYPE (exp));
6020 val = convert_modes (mode, old_mode, val, 1);
6021 return val;
6025 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6026 EXP is the CALL_EXPR. CODE is the rtx code
6027 that corresponds to the arithmetic or logical operation from the name;
6028 an exception here is that NOT actually means NAND. TARGET is an optional
6029 place for us to store the results; AFTER is true if this is the
6030 fetch_and_xxx form. */
6032 static rtx
6033 expand_builtin_sync_operation (machine_mode mode, tree exp,
6034 enum rtx_code code, bool after,
6035 rtx target)
6037 rtx val, mem;
6038 location_t loc = EXPR_LOCATION (exp);
6040 if (code == NOT && warn_sync_nand)
6042 tree fndecl = get_callee_fndecl (exp);
6043 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6045 static bool warned_f_a_n, warned_n_a_f;
6047 switch (fcode)
6049 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6050 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6051 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6052 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6053 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6054 if (warned_f_a_n)
6055 break;
6057 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6058 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6059 warned_f_a_n = true;
6060 break;
6062 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6063 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6064 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6065 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6066 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6067 if (warned_n_a_f)
6068 break;
6070 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6071 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6072 warned_n_a_f = true;
6073 break;
6075 default:
6076 gcc_unreachable ();
6080 /* Expand the operands. */
6081 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6082 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6084 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6085 after);
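/* Illustration only: calls routed through the expander above, including
   the NAND semantics that changed in GCC 4.4 (hence the note above):

     prev = __sync_fetch_and_add (&counter, 1);   // value before the add
     next = __sync_add_and_fetch (&counter, 1);   // value after the add
     __sync_fetch_and_nand (&x, m);               // x = ~(x & m) since 4.4
*/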
6088 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6089 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6090 true if this is the boolean form. TARGET is a place for us to store the
6091 results; this is NOT optional if IS_BOOL is true. */
6093 static rtx
6094 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6095 bool is_bool, rtx target)
6097 rtx old_val, new_val, mem;
6098 rtx *pbool, *poval;
6100 /* Expand the operands. */
6101 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6102 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6103 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6105 pbool = poval = NULL;
6106 if (target != const0_rtx)
6108 if (is_bool)
6109 pbool = &target;
6110 else
6111 poval = &target;
6113 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6114 false, MEMMODEL_SYNC_SEQ_CST,
6115 MEMMODEL_SYNC_SEQ_CST))
6116 return NULL_RTX;
6118 return target;
6121 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6122 general form is actually an atomic exchange, and some targets only
6123 support a reduced form with the second argument being a constant 1.
6124 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6125 the results. */
6127 static rtx
6128 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6129 rtx target)
6131 rtx val, mem;
6133 /* Expand the operands. */
6134 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6135 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6137 return expand_sync_lock_test_and_set (target, mem, val);
6140 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6142 static void
6143 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6145 rtx mem;
6147 /* Expand the operands. */
6148 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6150 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6153 /* Given an integer representing an ``enum memmodel'', verify its
6154 correctness and return the memory model enum. */
6156 static enum memmodel
6157 get_memmodel (tree exp)
6159 rtx op;
6160 unsigned HOST_WIDE_INT val;
6161 location_t loc
6162 = expansion_point_location_if_in_system_header (input_location);
6164 /* If the parameter is not a constant, it's a run time value so we'll just
6165 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6166 if (TREE_CODE (exp) != INTEGER_CST)
6167 return MEMMODEL_SEQ_CST;
6169 op = expand_normal (exp);
6171 val = INTVAL (op);
6172 if (targetm.memmodel_check)
6173 val = targetm.memmodel_check (val);
6174 else if (val & ~MEMMODEL_MASK)
6176 warning_at (loc, OPT_Winvalid_memory_model,
6177 "unknown architecture specifier in memory model to builtin");
6178 return MEMMODEL_SEQ_CST;
6181 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
6182 if (memmodel_base (val) >= MEMMODEL_LAST)
6184 warning_at (loc, OPT_Winvalid_memory_model,
6185 "invalid memory model argument to builtin");
6186 return MEMMODEL_SEQ_CST;
6189 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6190 be conservative and promote consume to acquire. */
6191 if (val == MEMMODEL_CONSUME)
6192 val = MEMMODEL_ACQUIRE;
6194 return (enum memmodel) val;
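/* Illustration only: how the checks above treat user arguments.  A
   non-constant model is conservatively treated as seq_cst, and consume
   is promoted to acquire (PR 59448):

     __atomic_load_n (&x, __ATOMIC_ACQUIRE);   // used as given
     __atomic_load_n (&x, __ATOMIC_CONSUME);   // treated as acquire
*/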
6197 /* Expand the __atomic_exchange intrinsic:
6198 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6199 EXP is the CALL_EXPR.
6200 TARGET is an optional place for us to store the results. */
6202 static rtx
6203 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6205 rtx val, mem;
6206 enum memmodel model;
6208 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6210 if (!flag_inline_atomics)
6211 return NULL_RTX;
6213 /* Expand the operands. */
6214 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6215 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6217 return expand_atomic_exchange (target, mem, val, model);
6220 /* Expand the __atomic_compare_exchange intrinsic:
6221 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6222 TYPE desired, BOOL weak,
6223 enum memmodel success,
6224 enum memmodel failure)
6225 EXP is the CALL_EXPR.
6226 TARGET is an optional place for us to store the results. */
6228 static rtx
6229 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6230 rtx target)
6232 rtx expect, desired, mem, oldval;
6233 rtx_code_label *label;
6234 enum memmodel success, failure;
6235 tree weak;
6236 bool is_weak;
6237 location_t loc
6238 = expansion_point_location_if_in_system_header (input_location);
6240 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6241 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6243 if (failure > success)
6245 warning_at (loc, OPT_Winvalid_memory_model,
6246 "failure memory model cannot be stronger than success "
6247 "memory model for %<__atomic_compare_exchange%>");
6248 success = MEMMODEL_SEQ_CST;
6251 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6253 warning_at (loc, OPT_Winvalid_memory_model,
6254 "invalid failure memory model for "
6255 "%<__atomic_compare_exchange%>");
6256 failure = MEMMODEL_SEQ_CST;
6257 success = MEMMODEL_SEQ_CST;
6261 if (!flag_inline_atomics)
6262 return NULL_RTX;
6264 /* Expand the operands. */
6265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6267 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6268 expect = convert_memory_address (Pmode, expect);
6269 expect = gen_rtx_MEM (mode, expect);
6270 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6272 weak = CALL_EXPR_ARG (exp, 3);
6273 is_weak = false;
6274 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6275 is_weak = true;
6277 if (target == const0_rtx)
6278 target = NULL;
6280 /* Lest the rtl backend create a race condition with an improper store
6281 to memory, always create a new pseudo for OLDVAL. */
6282 oldval = NULL;
6284 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6285 is_weak, success, failure))
6286 return NULL_RTX;
6288 /* Conditionally store back to EXPECT, lest we create a race condition
6289 with an improper store to memory. */
6290 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6291 the normal case where EXPECT is totally private, i.e. a register. At
6292 which point the store can be unconditional. */
6293 label = gen_label_rtx ();
6294 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6295 GET_MODE (target), 1, label);
6296 emit_move_insn (expect, oldval);
6297 emit_label (label);
6299 return target;
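/* Illustration only: the source-level pattern lowered above; on failure
   the current value is stored back through the second argument, hence
   the conditional store-back to EXPECT:

     int expected = 0;
     while (!__atomic_compare_exchange_n (&lock, &expected, 1,
                                          false,              // weak
                                          __ATOMIC_ACQUIRE,   // success
                                          __ATOMIC_RELAXED))  // failure
       expected = 0;
*/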
6302 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6303 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6304 call. The weak parameter must be dropped to match the expected parameter
6305 list and the expected argument changed from value to pointer to memory
6306 slot. */
6308 static void
6309 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6311 unsigned int z;
6312 vec<tree, va_gc> *vec;
6314 vec_alloc (vec, 5);
6315 vec->quick_push (gimple_call_arg (call, 0));
6316 tree expected = gimple_call_arg (call, 1);
6317 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6318 TREE_TYPE (expected));
6319 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6320 if (expd != x)
6321 emit_move_insn (x, expd);
6322 tree v = make_tree (TREE_TYPE (expected), x);
6323 vec->quick_push (build1 (ADDR_EXPR,
6324 build_pointer_type (TREE_TYPE (expected)), v));
6325 vec->quick_push (gimple_call_arg (call, 2));
6326 /* Skip the boolean weak parameter. */
6327 for (z = 4; z < 6; z++)
6328 vec->quick_push (gimple_call_arg (call, z));
6329 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6330 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6331 gcc_assert (bytes_log2 < 5);
6332 built_in_function fncode
6333 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6334 + bytes_log2);
6335 tree fndecl = builtin_decl_explicit (fncode);
6336 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6337 fndecl);
6338 tree exp = build_call_vec (boolean_type_node, fn, vec);
6339 tree lhs = gimple_call_lhs (call);
6340 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6341 if (lhs)
6343 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6344 if (GET_MODE (boolret) != mode)
6345 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6346 x = force_reg (mode, x);
6347 write_complex_part (target, boolret, true);
6348 write_complex_part (target, x, false);
6352 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6354 void
6355 expand_ifn_atomic_compare_exchange (gcall *call)
6357 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6358 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6359 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6360 rtx expect, desired, mem, oldval, boolret;
6361 enum memmodel success, failure;
6362 tree lhs;
6363 bool is_weak;
6364 location_t loc
6365 = expansion_point_location_if_in_system_header (gimple_location (call));
6367 success = get_memmodel (gimple_call_arg (call, 4));
6368 failure = get_memmodel (gimple_call_arg (call, 5));
6370 if (failure > success)
6372 warning_at (loc, OPT_Winvalid_memory_model,
6373 "failure memory model cannot be stronger than success "
6374 "memory model for %<__atomic_compare_exchange%>");
6375 success = MEMMODEL_SEQ_CST;
6378 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6380 warning_at (loc, OPT_Winvalid_memory_model,
6381 "invalid failure memory model for "
6382 "%<__atomic_compare_exchange%>");
6383 failure = MEMMODEL_SEQ_CST;
6384 success = MEMMODEL_SEQ_CST;
6387 if (!flag_inline_atomics)
6389 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6390 return;
6393 /* Expand the operands. */
6394 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6396 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6397 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6399 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6401 boolret = NULL;
6402 oldval = NULL;
6404 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6405 is_weak, success, failure))
6407 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6408 return;
6411 lhs = gimple_call_lhs (call);
6412 if (lhs)
6414 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6415 if (GET_MODE (boolret) != mode)
6416 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6417 write_complex_part (target, boolret, true);
6418 write_complex_part (target, oldval, false);
6422 /* Expand the __atomic_load intrinsic:
6423 TYPE __atomic_load (TYPE *object, enum memmodel)
6424 EXP is the CALL_EXPR.
6425 TARGET is an optional place for us to store the results. */
6427 static rtx
6428 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6430 rtx mem;
6431 enum memmodel model;
6433 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6434 if (is_mm_release (model) || is_mm_acq_rel (model))
6436 location_t loc
6437 = expansion_point_location_if_in_system_header (input_location);
6438 warning_at (loc, OPT_Winvalid_memory_model,
6439 "invalid memory model for %<__atomic_load%>");
6440 model = MEMMODEL_SEQ_CST;
6443 if (!flag_inline_atomics)
6444 return NULL_RTX;
6446 /* Expand the operand. */
6447 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6449 return expand_atomic_load (target, mem, model);
6453 /* Expand the __atomic_store intrinsic:
6454 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6455 EXP is the CALL_EXPR.
6456 TARGET is an optional place for us to store the results. */
6458 static rtx
6459 expand_builtin_atomic_store (machine_mode mode, tree exp)
6461 rtx mem, val;
6462 enum memmodel model;
6464 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6465 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6466 || is_mm_release (model)))
6468 location_t loc
6469 = expansion_point_location_if_in_system_header (input_location);
6470 warning_at (loc, OPT_Winvalid_memory_model,
6471 "invalid memory model for %<__atomic_store%>");
6472 model = MEMMODEL_SEQ_CST;
6475 if (!flag_inline_atomics)
6476 return NULL_RTX;
6478 /* Expand the operands. */
6479 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6480 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6482 return expand_atomic_store (mem, val, model, false);
6485 /* Expand the __atomic_fetch_XXX intrinsic:
6486 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6487 EXP is the CALL_EXPR.
6488 TARGET is an optional place for us to store the results.
6489 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
6490 FETCH_AFTER is true if returning the result of the operation.
6491 FETCH_AFTER is false if returning the value before the operation.
6492 IGNORE is true if the result is not used.
6493 EXT_CALL is the correct builtin for an external call if this cannot be
6494 resolved to an instruction sequence. */
6496 static rtx
6497 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6498 enum rtx_code code, bool fetch_after,
6499 bool ignore, enum built_in_function ext_call)
6501 rtx val, mem, ret;
6502 enum memmodel model;
6503 tree fndecl;
6504 tree addr;
6506 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6508 /* Expand the operands. */
6509 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6510 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6512 /* Only try generating instructions if inlining is turned on. */
6513 if (flag_inline_atomics)
6515 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6516 if (ret)
6517 return ret;
6520 /* Return if a different routine isn't needed for the library call. */
6521 if (ext_call == BUILT_IN_NONE)
6522 return NULL_RTX;
6524 /* Change the call to the specified function. */
6525 fndecl = get_callee_fndecl (exp);
6526 addr = CALL_EXPR_FN (exp);
6527 STRIP_NOPS (addr);
6529 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6530 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6532 /* If we will emit code after the call, the call cannot be a tail call.
6533 If it is emitted as a tail call, a barrier is emitted after it, and
6534 then all trailing code is removed. */
6535 if (!ignore)
6536 CALL_EXPR_TAILCALL (exp) = 0;
6538 /* Expand the call here so we can emit trailing code. */
6539 ret = expand_call (exp, target, ignore);
6541 /* Replace the original function just in case it matters. */
6542 TREE_OPERAND (addr, 0) = fndecl;
6544 /* Then issue the arithmetic correction to return the right result. */
6545 if (!ignore)
6547 if (code == NOT)
6549 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6550 OPTAB_LIB_WIDEN);
6551 ret = expand_simple_unop (mode, NOT, ret, target, true);
6553 else
6554 ret = expand_simple_binop (mode, code, ret, val, target, true,
6555 OPTAB_LIB_WIDEN);
6557 return ret;
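/* Illustration only: the arithmetic correction above when the library
   routine returns the pre-operation value but the *_op_fetch result was
   requested, e.g. __atomic_add_fetch falling back to __atomic_fetch_add:

     ret = __atomic_fetch_add (ptr, val, model);   // old value
     ret = ret + val;                              // value after the op

   and for NAND the correction is ret = ~(ret & val).  */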
6560 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6562 void
6563 expand_ifn_atomic_bit_test_and (gcall *call)
6565 tree ptr = gimple_call_arg (call, 0);
6566 tree bit = gimple_call_arg (call, 1);
6567 tree flag = gimple_call_arg (call, 2);
6568 tree lhs = gimple_call_lhs (call);
6569 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6570 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6571 enum rtx_code code;
6572 optab optab;
6573 struct expand_operand ops[5];
6575 gcc_assert (flag_inline_atomics);
6577 if (gimple_call_num_args (call) == 4)
6578 model = get_memmodel (gimple_call_arg (call, 3));
6580 rtx mem = get_builtin_sync_mem (ptr, mode);
6581 rtx val = expand_expr_force_mode (bit, mode);
6583 switch (gimple_call_internal_fn (call))
6585 case IFN_ATOMIC_BIT_TEST_AND_SET:
6586 code = IOR;
6587 optab = atomic_bit_test_and_set_optab;
6588 break;
6589 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6590 code = XOR;
6591 optab = atomic_bit_test_and_complement_optab;
6592 break;
6593 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6594 code = AND;
6595 optab = atomic_bit_test_and_reset_optab;
6596 break;
6597 default:
6598 gcc_unreachable ();
6601 if (lhs == NULL_TREE)
6603 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6604 val, NULL_RTX, true, OPTAB_DIRECT);
6605 if (code == AND)
6606 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6607 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6608 return;
6611 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6612 enum insn_code icode = direct_optab_handler (optab, mode);
6613 gcc_assert (icode != CODE_FOR_nothing);
6614 create_output_operand (&ops[0], target, mode);
6615 create_fixed_operand (&ops[1], mem);
6616 create_convert_operand_to (&ops[2], val, mode, true);
6617 create_integer_operand (&ops[3], model);
6618 create_integer_operand (&ops[4], integer_onep (flag));
6619 if (maybe_expand_insn (icode, 5, ops))
6620 return;
6622 rtx bitval = val;
6623 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6624 val, NULL_RTX, true, OPTAB_DIRECT);
6625 rtx maskval = val;
6626 if (code == AND)
6627 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6628 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6629 code, model, false);
6630 if (integer_onep (flag))
6632 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6633 NULL_RTX, true, OPTAB_DIRECT);
6634 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6635 true, OPTAB_DIRECT);
6637 else
6638 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6639 OPTAB_DIRECT);
6640 if (result != target)
6641 emit_move_insn (target, result);
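/* Illustration only, hypothetical names: one gimple-level shape that is
   matched into IFN_ATOMIC_BIT_TEST_AND_SET and expanded above.

     mask = 1u << bit;
     old = __atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST);
     was_set = (old >> bit) & 1;
*/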
6644 /* Expand an atomic clear operation.
6645 void _atomic_clear (BOOL *obj, enum memmodel)
6646 EXP is the call expression. */
6648 static rtx
6649 expand_builtin_atomic_clear (tree exp)
6651 machine_mode mode;
6652 rtx mem, ret;
6653 enum memmodel model;
6655 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6656 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6657 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6659 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6661 location_t loc
6662 = expansion_point_location_if_in_system_header (input_location);
6663 warning_at (loc, OPT_Winvalid_memory_model,
6664 "invalid memory model for %<__atomic_store%>");
6665 model = MEMMODEL_SEQ_CST;
6668 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6669 Failing that, a store is issued by __atomic_store. The only way this can
6670 fail is if the bool type is larger than a word size. Unlikely, but
6671 handle it anyway for completeness. Assume a single threaded model since
6672 there is no atomic support in this case, and no barriers are required. */
6673 ret = expand_atomic_store (mem, const0_rtx, model, true);
6674 if (!ret)
6675 emit_move_insn (mem, const0_rtx);
6676 return const0_rtx;
6679 /* Expand an atomic test_and_set operation.
6680 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6681 EXP is the call expression. */
6683 static rtx
6684 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6686 rtx mem;
6687 enum memmodel model;
6688 machine_mode mode;
6690 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6691 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6692 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6694 return expand_atomic_test_and_set (target, mem, model);
6698 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6699 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6701 static tree
6702 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6704 int size;
6705 machine_mode mode;
6706 unsigned int mode_align, type_align;
6708 if (TREE_CODE (arg0) != INTEGER_CST)
6709 return NULL_TREE;
6711 /* We need a corresponding integer mode for the access to be lock-free. */
6712 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6713 if (!int_mode_for_size (size, 0).exists (&mode))
6714 return boolean_false_node;
6716 mode_align = GET_MODE_ALIGNMENT (mode);
6718 if (TREE_CODE (arg1) == INTEGER_CST)
6720 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6722 /* Either this argument is null, or it's a fake pointer encoding
6723 the alignment of the object. */
6724 val = least_bit_hwi (val);
6725 val *= BITS_PER_UNIT;
6727 if (val == 0 || mode_align < val)
6728 type_align = mode_align;
6729 else
6730 type_align = val;
6732 else
6734 tree ttype = TREE_TYPE (arg1);
6736 /* This function is usually invoked and folded immediately by the front
6737 end before anything else has a chance to look at it. The pointer
6738 parameter at this point is usually cast to a void *, so check for that
6739 and look past the cast. */
6740 if (CONVERT_EXPR_P (arg1)
6741 && POINTER_TYPE_P (ttype)
6742 && VOID_TYPE_P (TREE_TYPE (ttype))
6743 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6744 arg1 = TREE_OPERAND (arg1, 0);
6746 ttype = TREE_TYPE (arg1);
6747 gcc_assert (POINTER_TYPE_P (ttype));
6749 /* Get the underlying type of the object. */
6750 ttype = TREE_TYPE (ttype);
6751 type_align = TYPE_ALIGN (ttype);
6754 /* If the object has smaller alignment, the lock free routines cannot
6755 be used. */
6756 if (type_align < mode_align)
6757 return boolean_false_node;
6759 /* Check if a compare_and_swap pattern exists for the mode which represents
6760 the required size. The pattern is not allowed to fail, so the existence
6761 of the pattern indicates support is present. Also require that an
6762 atomic load exists for the required size. */
6763 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6764 return boolean_true_node;
6765 else
6766 return boolean_false_node;
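/* Illustration only: folds the front end typically requests here.

     __atomic_always_lock_free (sizeof (int), 0)   // usually true
     __atomic_always_lock_free (16, 0)             // needs a 128-bit CAS
*/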
6769 /* Return true if the parameters to call EXP represent an object which will
6770 always generate lock free instructions. The first argument represents the
6771 size of the object, and the second parameter is a pointer to the object
6772 itself. If NULL is passed for the object, then the result is based on
6773 typical alignment for an object of the specified size. Otherwise return
6774 false. */
6776 static rtx
6777 expand_builtin_atomic_always_lock_free (tree exp)
6779 tree size;
6780 tree arg0 = CALL_EXPR_ARG (exp, 0);
6781 tree arg1 = CALL_EXPR_ARG (exp, 1);
6783 if (TREE_CODE (arg0) != INTEGER_CST)
6785 error ("non-constant argument 1 to __atomic_always_lock_free");
6786 return const0_rtx;
6789 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6790 if (size == boolean_true_node)
6791 return const1_rtx;
6792 return const0_rtx;
6795 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6796 is lock free on this architecture. */
6798 static tree
6799 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6801 if (!flag_inline_atomics)
6802 return NULL_TREE;
6804 /* If it isn't always lock free, don't generate a result. */
6805 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6806 return boolean_true_node;
6808 return NULL_TREE;
6811 /* Return true if the parameters to call EXP represent an object which will
6812 always generate lock free instructions. The first argument represents the
6813 size of the object, and the second parameter is a pointer to the object
6814 itself. If NULL is passed for the object, then the result is based on
6815 typical alignment for an object of the specified size. Otherwise return
6816 NULL. */
6818 static rtx
6819 expand_builtin_atomic_is_lock_free (tree exp)
6821 tree size;
6822 tree arg0 = CALL_EXPR_ARG (exp, 0);
6823 tree arg1 = CALL_EXPR_ARG (exp, 1);
6825 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6827 error ("non-integer argument 1 to __atomic_is_lock_free");
6828 return NULL_RTX;
6831 if (!flag_inline_atomics)
6832 return NULL_RTX;
6834 /* If the value is known at compile time, return the RTX for it. */
6835 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6836 if (size == boolean_true_node)
6837 return const1_rtx;
6839 return NULL_RTX;
6842 /* Expand the __atomic_thread_fence intrinsic:
6843 void __atomic_thread_fence (enum memmodel)
6844 EXP is the CALL_EXPR. */
6846 static void
6847 expand_builtin_atomic_thread_fence (tree exp)
6849 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6850 expand_mem_thread_fence (model);
6853 /* Expand the __atomic_signal_fence intrinsic:
6854 void __atomic_signal_fence (enum memmodel)
6855 EXP is the CALL_EXPR. */
6857 static void
6858 expand_builtin_atomic_signal_fence (tree exp)
6860 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6861 expand_mem_signal_fence (model);
6864 /* Expand the __sync_synchronize intrinsic. */
6866 static void
6867 expand_builtin_sync_synchronize (void)
6869 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6872 static rtx
6873 expand_builtin_thread_pointer (tree exp, rtx target)
6875 enum insn_code icode;
6876 if (!validate_arglist (exp, VOID_TYPE))
6877 return const0_rtx;
6878 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6879 if (icode != CODE_FOR_nothing)
6881 struct expand_operand op;
6882 /* If the target is not suitable then create a new target. */
6883 if (target == NULL_RTX
6884 || !REG_P (target)
6885 || GET_MODE (target) != Pmode)
6886 target = gen_reg_rtx (Pmode);
6887 create_output_operand (&op, target, Pmode);
6888 expand_insn (icode, 1, &op);
6889 return target;
6891 error ("__builtin_thread_pointer is not supported on this target");
6892 return const0_rtx;
6895 static void
6896 expand_builtin_set_thread_pointer (tree exp)
6898 enum insn_code icode;
6899 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6900 return;
6901 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6902 if (icode != CODE_FOR_nothing)
6904 struct expand_operand op;
6905 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6906 Pmode, EXPAND_NORMAL);
6907 create_input_operand (&op, val, Pmode);
6908 expand_insn (icode, 1, &op);
6909 return;
6911 error ("__builtin_set_thread_pointer is not supported on this target");
6915 /* Emit code to restore the current value of stack. */
6917 static void
6918 expand_stack_restore (tree var)
6920 rtx_insn *prev;
6921 rtx sa = expand_normal (var);
6923 sa = convert_memory_address (Pmode, sa);
6925 prev = get_last_insn ();
6926 emit_stack_restore (SAVE_BLOCK, sa);
6928 record_new_stack_level ();
6930 fixup_args_size_notes (prev, get_last_insn (), 0);
6933 /* Emit code to save the current value of stack. */
6935 static rtx
6936 expand_stack_save (void)
6938 rtx ret = NULL_RTX;
6940 emit_stack_save (SAVE_BLOCK, &ret);
6941 return ret;
6944 /* Emit code to get the openacc gang, worker or vector id or size. */
6946 static rtx
6947 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6949 const char *name;
6950 rtx fallback_retval;
6951 rtx_insn *(*gen_fn) (rtx, rtx);
6952 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6954 case BUILT_IN_GOACC_PARLEVEL_ID:
6955 name = "__builtin_goacc_parlevel_id";
6956 fallback_retval = const0_rtx;
6957 gen_fn = targetm.gen_oacc_dim_pos;
6958 break;
6959 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6960 name = "__builtin_goacc_parlevel_size";
6961 fallback_retval = const1_rtx;
6962 gen_fn = targetm.gen_oacc_dim_size;
6963 break;
6964 default:
6965 gcc_unreachable ();
6968 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6970 error ("%qs only supported in OpenACC code", name);
6971 return const0_rtx;
6974 tree arg = CALL_EXPR_ARG (exp, 0);
6975 if (TREE_CODE (arg) != INTEGER_CST)
6977 error ("non-constant argument 0 to %qs", name);
6978 return const0_rtx;
6981 int dim = TREE_INT_CST_LOW (arg);
6982 switch (dim)
6984 case GOMP_DIM_GANG:
6985 case GOMP_DIM_WORKER:
6986 case GOMP_DIM_VECTOR:
6987 break;
6988 default:
6989 error ("illegal argument 0 to %qs", name);
6990 return const0_rtx;
6993 if (ignore)
6994 return target;
6996 if (target == NULL_RTX)
6997 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6999 if (!targetm.have_oacc_dim_size ())
7001 emit_move_insn (target, fallback_retval);
7002 return target;
7005 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7006 emit_insn (gen_fn (reg, GEN_INT (dim)));
7007 if (reg != target)
7008 emit_move_insn (target, reg);
7010 return target;
7013 /* Expand a string compare operation using a sequence of char comparisons
7014 to get rid of the calling overhead, with result going to TARGET if
7015 that's convenient.
7017 VAR_STR is the variable string source;
7018 CONST_STR is the constant string source;
7019 LENGTH is the number of chars to compare;
7020 CONST_STR_N indicates which source string is the constant string;
7021 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7023 The call expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
7025 target = (int) (unsigned char) var_str[0]
7026 - (int) (unsigned char) const_str[0];
7027 if (target != 0)
7028 goto ne_label;
7030 target = (int) (unsigned char) var_str[length - 2]
7031 - (int) (unsigned char) const_str[length - 2];
7032 if (target != 0)
7033 goto ne_label;
7034 target = (int) (unsigned char) var_str[length - 1]
7035 - (int) (unsigned char) const_str[length - 1];
7036 ne_label:
7039 static rtx
7040 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7041 unsigned HOST_WIDE_INT length,
7042 int const_str_n, machine_mode mode)
7044 HOST_WIDE_INT offset = 0;
7045 rtx var_rtx_array
7046 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7047 rtx var_rtx = NULL_RTX;
7048 rtx const_rtx = NULL_RTX;
7049 rtx result = target ? target : gen_reg_rtx (mode);
7050 rtx_code_label *ne_label = gen_label_rtx ();
7051 tree unit_type_node = unsigned_char_type_node;
7052 scalar_int_mode unit_mode
7053 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7055 start_sequence ();
7057 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7059 var_rtx
7060 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7061 const_rtx = c_readstr (const_str + offset, unit_mode);
7062 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7063 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7065 op0 = convert_modes (mode, unit_mode, op0, 1);
7066 op1 = convert_modes (mode, unit_mode, op1, 1);
7067 result = expand_simple_binop (mode, MINUS, op0, op1,
7068 result, 1, OPTAB_WIDEN);
7069 if (i < length - 1)
7070 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7071 mode, true, ne_label);
7072 offset += GET_MODE_SIZE (unit_mode);
7075 emit_label (ne_label);
7076 rtx_insn *insns = get_insns ();
7077 end_sequence ();
7078 emit_insn (insns);
7080 return result;
7083 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7084 going to TARGET if that's convenient.
7085 If the call is not inlined, return NULL_RTX. */
7086 static rtx
7087 inline_expand_builtin_string_cmp (tree exp, rtx target)
7089 tree fndecl = get_callee_fndecl (exp);
7090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7091 unsigned HOST_WIDE_INT length = 0;
7092 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7094 /* Do NOT apply this inline expansion when optimizing for size or
7095 when the optimization level is below 2. */
7096 if (optimize < 2 || optimize_insn_for_size_p ())
7097 return NULL_RTX;
7099 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7100 || fcode == BUILT_IN_STRNCMP
7101 || fcode == BUILT_IN_MEMCMP);
7103 /* On a target where the type of the call (int) has the same or narrower
7104 precision than unsigned char, give up on the inline expansion. */
7105 if (TYPE_PRECISION (unsigned_char_type_node)
7106 >= TYPE_PRECISION (TREE_TYPE (exp)))
7107 return NULL_RTX;
7109 tree arg1 = CALL_EXPR_ARG (exp, 0);
7110 tree arg2 = CALL_EXPR_ARG (exp, 1);
7111 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7113 unsigned HOST_WIDE_INT len1 = 0;
7114 unsigned HOST_WIDE_INT len2 = 0;
7115 unsigned HOST_WIDE_INT len3 = 0;
7117 const char *src_str1 = c_getstr (arg1, &len1);
7118 const char *src_str2 = c_getstr (arg2, &len2);
7120 /* If neither string is a constant string, the call does not qualify. */
7121 if (!src_str1 && !src_str2)
7122 return NULL_RTX;
7124 /* For strncmp, if the length is not a constant, the call does not qualify. */
7125 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7126 return NULL_RTX;
7128 int const_str_n = 0;
7129 if (!len1)
7130 const_str_n = 2;
7131 else if (!len2)
7132 const_str_n = 1;
7133 else if (len2 > len1)
7134 const_str_n = 1;
7135 else
7136 const_str_n = 2;
7138 gcc_checking_assert (const_str_n > 0);
7139 length = (const_str_n == 1) ? len1 : len2;
7141 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7142 length = len3;
7144 /* If the length of the comparison is larger than the threshold,
7145 do nothing. */
7146 if (length > (unsigned HOST_WIDE_INT)
7147 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7148 return NULL_RTX;
7150 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7152 /* Now, start the inline expansion of the call. */
7153 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7154 (const_str_n == 1) ? src_str1 : src_str2, length,
7155 const_str_n, mode);
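/* Illustrative note, not part of the original source: the threshold
   above is --param builtin-string-cmp-inline-length (default 3 in
   this version), so with -O2 a call such as

     int f (const char *s) { return strcmp (s, "ab"); }

   is expanded into the byte-comparison sequence shown before
   inline_string_cmp, while a longer constant string keeps the
   library call. */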
7158 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7159 represents the size of the first argument to that call, or VOIDmode
7160 if the argument is a pointer. IGNORE will be true if the result
7161 isn't used. */
7162 static rtx
7163 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7164 bool ignore)
7166 rtx val, failsafe;
7167 unsigned nargs = call_expr_nargs (exp);
7169 tree arg0 = CALL_EXPR_ARG (exp, 0);
7171 if (mode == VOIDmode)
7173 mode = TYPE_MODE (TREE_TYPE (arg0));
7174 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7177 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7179 /* An optional second argument can be used as a failsafe value on
7180 some machines. If it isn't present, then the failsafe value is
7181 assumed to be 0. */
7182 if (nargs > 1)
7184 tree arg1 = CALL_EXPR_ARG (exp, 1);
7185 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7187 else
7188 failsafe = const0_rtx;
7190 /* If the result isn't used, the behavior is undefined. It would be
7191 nice to emit a warning here, but path splitting means this might
7192 happen with legitimate code. So simply drop the builtin
7193 expansion in that case; we've handled any side-effects above. */
7194 if (ignore)
7195 return const0_rtx;
7197 /* If we don't have a suitable target, create one to hold the result. */
7198 if (target == NULL || GET_MODE (target) != mode)
7199 target = gen_reg_rtx (mode);
7201 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7202 val = convert_modes (mode, VOIDmode, val, false);
7204 return targetm.speculation_safe_value (mode, target, val, failsafe);
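/* Illustrative sketch, not part of the original source: the typical
   use of this builtin is Spectre-v1 style hardening, forcing a value
   to the failsafe (0 by default) on mis-speculated paths:

     int load (int *array, unsigned idx, unsigned bound)
     {
       if (idx < bound)
         return array[__builtin_speculation_safe_value (idx)];
       return 0;
     }  */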
7207 /* Expand an expression EXP that calls a built-in function,
7208 with result going to TARGET if that's convenient
7209 (and in mode MODE if that's convenient).
7210 SUBTARGET may be used as the target for computing one of EXP's operands.
7211 IGNORE is nonzero if the value is to be ignored. */
7214 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7215 int ignore)
7217 tree fndecl = get_callee_fndecl (exp);
7218 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7219 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7220 int flags;
7222 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7223 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7225 /* When ASan is enabled, we don't want to expand some memory/string
7226 builtins and rely on libsanitizer's hooks. This allows us to avoid
7227 redundant checks and be sure that a possible overflow will be detected
7228 by ASan. */
7230 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7231 return expand_call (exp, target, ignore);
7233 /* When not optimizing, generate calls to library functions for a certain
7234 set of builtins. */
7235 if (!optimize
7236 && !called_as_built_in (fndecl)
7237 && fcode != BUILT_IN_FORK
7238 && fcode != BUILT_IN_EXECL
7239 && fcode != BUILT_IN_EXECV
7240 && fcode != BUILT_IN_EXECLP
7241 && fcode != BUILT_IN_EXECLE
7242 && fcode != BUILT_IN_EXECVP
7243 && fcode != BUILT_IN_EXECVE
7244 && !ALLOCA_FUNCTION_CODE_P (fcode)
7245 && fcode != BUILT_IN_FREE)
7246 return expand_call (exp, target, ignore);
7248 /* The built-in function expanders test for target == const0_rtx
7249 to determine whether the function's result will be ignored. */
7250 if (ignore)
7251 target = const0_rtx;
7253 /* If the result of a pure or const built-in function is ignored, and
7254 none of its arguments are volatile, we can avoid expanding the
7255 built-in call and just evaluate the arguments for side-effects. */
7256 if (target == const0_rtx
7257 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7258 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7260 bool volatilep = false;
7261 tree arg;
7262 call_expr_arg_iterator iter;
7264 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7265 if (TREE_THIS_VOLATILE (arg))
7267 volatilep = true;
7268 break;
7271 if (! volatilep)
7273 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7274 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7275 return const0_rtx;
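/* For example (illustrative, not from the original source), with
   optimization enabled a statement such as

     (void) __builtin_labs (x++);

   never expands the labs call: labs is const, its result is unused,
   and only the side effect of x++ is kept. */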
7279 switch (fcode)
7281 CASE_FLT_FN (BUILT_IN_FABS):
7282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7283 case BUILT_IN_FABSD32:
7284 case BUILT_IN_FABSD64:
7285 case BUILT_IN_FABSD128:
7286 target = expand_builtin_fabs (exp, target, subtarget);
7287 if (target)
7288 return target;
7289 break;
7291 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7292 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7293 target = expand_builtin_copysign (exp, target, subtarget);
7294 if (target)
7295 return target;
7296 break;
7298 /* Just do a normal library call if we were unable to fold
7299 the values. */
7300 CASE_FLT_FN (BUILT_IN_CABS):
7301 break;
7303 CASE_FLT_FN (BUILT_IN_FMA):
7304 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7305 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7306 if (target)
7307 return target;
7308 break;
7310 CASE_FLT_FN (BUILT_IN_ILOGB):
7311 if (! flag_unsafe_math_optimizations)
7312 break;
7313 gcc_fallthrough ();
7314 CASE_FLT_FN (BUILT_IN_ISINF):
7315 CASE_FLT_FN (BUILT_IN_FINITE):
7316 case BUILT_IN_ISFINITE:
7317 case BUILT_IN_ISNORMAL:
7318 target = expand_builtin_interclass_mathfn (exp, target);
7319 if (target)
7320 return target;
7321 break;
7323 CASE_FLT_FN (BUILT_IN_ICEIL):
7324 CASE_FLT_FN (BUILT_IN_LCEIL):
7325 CASE_FLT_FN (BUILT_IN_LLCEIL):
7326 CASE_FLT_FN (BUILT_IN_LFLOOR):
7327 CASE_FLT_FN (BUILT_IN_IFLOOR):
7328 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7329 target = expand_builtin_int_roundingfn (exp, target);
7330 if (target)
7331 return target;
7332 break;
7334 CASE_FLT_FN (BUILT_IN_IRINT):
7335 CASE_FLT_FN (BUILT_IN_LRINT):
7336 CASE_FLT_FN (BUILT_IN_LLRINT):
7337 CASE_FLT_FN (BUILT_IN_IROUND):
7338 CASE_FLT_FN (BUILT_IN_LROUND):
7339 CASE_FLT_FN (BUILT_IN_LLROUND):
7340 target = expand_builtin_int_roundingfn_2 (exp, target);
7341 if (target)
7342 return target;
7343 break;
7345 CASE_FLT_FN (BUILT_IN_POWI):
7346 target = expand_builtin_powi (exp, target);
7347 if (target)
7348 return target;
7349 break;
7351 CASE_FLT_FN (BUILT_IN_CEXPI):
7352 target = expand_builtin_cexpi (exp, target);
7353 gcc_assert (target);
7354 return target;
7356 CASE_FLT_FN (BUILT_IN_SIN):
7357 CASE_FLT_FN (BUILT_IN_COS):
7358 if (! flag_unsafe_math_optimizations)
7359 break;
7360 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7361 if (target)
7362 return target;
7363 break;
7365 CASE_FLT_FN (BUILT_IN_SINCOS):
7366 if (! flag_unsafe_math_optimizations)
7367 break;
7368 target = expand_builtin_sincos (exp);
7369 if (target)
7370 return target;
7371 break;
7373 case BUILT_IN_APPLY_ARGS:
7374 return expand_builtin_apply_args ();
7376 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7377 FUNCTION with a copy of the parameters described by
7378 ARGUMENTS, and ARGSIZE. It returns a block of memory
7379 allocated on the stack into which is stored all the registers
7380 that might possibly be used for returning the result of a
7381 function. ARGUMENTS is the value returned by
7382 __builtin_apply_args. ARGSIZE is the number of bytes of
7383 arguments that must be copied. ??? How should this value be
7384 computed? We'll also need a safe worst case value for varargs
7385 functions. */
7386 case BUILT_IN_APPLY:
7387 if (!validate_arglist (exp, POINTER_TYPE,
7388 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7389 && !validate_arglist (exp, REFERENCE_TYPE,
7390 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7391 return const0_rtx;
7392 else
7394 rtx ops[3];
7396 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7397 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7398 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7400 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7403 /* __builtin_return (RESULT) causes the function to return the
7404 value described by RESULT. RESULT is address of the block of
7405 memory returned by __builtin_apply. */
7406 case BUILT_IN_RETURN:
7407 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7408 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7409 return const0_rtx;
7411 case BUILT_IN_SAVEREGS:
7412 return expand_builtin_saveregs ();
7414 case BUILT_IN_VA_ARG_PACK:
7415 /* All valid uses of __builtin_va_arg_pack () are removed during
7416 inlining. */
7417 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7418 return const0_rtx;
7420 case BUILT_IN_VA_ARG_PACK_LEN:
7421 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7422 inlining. */
7423 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7424 return const0_rtx;
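/* Illustrative reminder, not part of the original source: a valid use
   of these builtins, which the inliner removes before expansion,
   looks like

     extern int my_fprintf (FILE *f, const char *fmt, ...);
     extern __inline __attribute__ ((__gnu_inline__)) int
     my_fprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   Only calls that survive to this point are diagnosed here. */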
7426 /* Return the address of the first anonymous stack arg. */
7427 case BUILT_IN_NEXT_ARG:
7428 if (fold_builtin_next_arg (exp, false))
7429 return const0_rtx;
7430 return expand_builtin_next_arg ();
7432 case BUILT_IN_CLEAR_CACHE:
7433 target = expand_builtin___clear_cache (exp);
7434 if (target)
7435 return target;
7436 break;
7438 case BUILT_IN_CLASSIFY_TYPE:
7439 return expand_builtin_classify_type (exp);
7441 case BUILT_IN_CONSTANT_P:
7442 return const0_rtx;
7444 case BUILT_IN_FRAME_ADDRESS:
7445 case BUILT_IN_RETURN_ADDRESS:
7446 return expand_builtin_frame_address (fndecl, exp);
7448 /* Returns the address of the area where the structure is returned.
7449 0 otherwise. */
7450 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7451 if (call_expr_nargs (exp) != 0
7452 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7453 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7454 return const0_rtx;
7455 else
7456 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7458 CASE_BUILT_IN_ALLOCA:
7459 target = expand_builtin_alloca (exp);
7460 if (target)
7461 return target;
7462 break;
7464 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7465 return expand_asan_emit_allocas_unpoison (exp);
7467 case BUILT_IN_STACK_SAVE:
7468 return expand_stack_save ();
7470 case BUILT_IN_STACK_RESTORE:
7471 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7472 return const0_rtx;
7474 case BUILT_IN_BSWAP16:
7475 case BUILT_IN_BSWAP32:
7476 case BUILT_IN_BSWAP64:
7477 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7478 if (target)
7479 return target;
7480 break;
7482 CASE_INT_FN (BUILT_IN_FFS):
7483 target = expand_builtin_unop (target_mode, exp, target,
7484 subtarget, ffs_optab);
7485 if (target)
7486 return target;
7487 break;
7489 CASE_INT_FN (BUILT_IN_CLZ):
7490 target = expand_builtin_unop (target_mode, exp, target,
7491 subtarget, clz_optab);
7492 if (target)
7493 return target;
7494 break;
7496 CASE_INT_FN (BUILT_IN_CTZ):
7497 target = expand_builtin_unop (target_mode, exp, target,
7498 subtarget, ctz_optab);
7499 if (target)
7500 return target;
7501 break;
7503 CASE_INT_FN (BUILT_IN_CLRSB):
7504 target = expand_builtin_unop (target_mode, exp, target,
7505 subtarget, clrsb_optab);
7506 if (target)
7507 return target;
7508 break;
7510 CASE_INT_FN (BUILT_IN_POPCOUNT):
7511 target = expand_builtin_unop (target_mode, exp, target,
7512 subtarget, popcount_optab);
7513 if (target)
7514 return target;
7515 break;
7517 CASE_INT_FN (BUILT_IN_PARITY):
7518 target = expand_builtin_unop (target_mode, exp, target,
7519 subtarget, parity_optab);
7520 if (target)
7521 return target;
7522 break;
7524 case BUILT_IN_STRLEN:
7525 target = expand_builtin_strlen (exp, target, target_mode);
7526 if (target)
7527 return target;
7528 break;
7530 case BUILT_IN_STRNLEN:
7531 target = expand_builtin_strnlen (exp, target, target_mode);
7532 if (target)
7533 return target;
7534 break;
7536 case BUILT_IN_STRCAT:
7537 target = expand_builtin_strcat (exp, target);
7538 if (target)
7539 return target;
7540 break;
7542 case BUILT_IN_STRCPY:
7543 target = expand_builtin_strcpy (exp, target);
7544 if (target)
7545 return target;
7546 break;
7548 case BUILT_IN_STRNCAT:
7549 target = expand_builtin_strncat (exp, target);
7550 if (target)
7551 return target;
7552 break;
7554 case BUILT_IN_STRNCPY:
7555 target = expand_builtin_strncpy (exp, target);
7556 if (target)
7557 return target;
7558 break;
7560 case BUILT_IN_STPCPY:
7561 target = expand_builtin_stpcpy (exp, target, mode);
7562 if (target)
7563 return target;
7564 break;
7566 case BUILT_IN_STPNCPY:
7567 target = expand_builtin_stpncpy (exp, target);
7568 if (target)
7569 return target;
7570 break;
7572 case BUILT_IN_MEMCHR:
7573 target = expand_builtin_memchr (exp, target);
7574 if (target)
7575 return target;
7576 break;
7578 case BUILT_IN_MEMCPY:
7579 target = expand_builtin_memcpy (exp, target);
7580 if (target)
7581 return target;
7582 break;
7584 case BUILT_IN_MEMMOVE:
7585 target = expand_builtin_memmove (exp, target);
7586 if (target)
7587 return target;
7588 break;
7590 case BUILT_IN_MEMPCPY:
7591 target = expand_builtin_mempcpy (exp, target);
7592 if (target)
7593 return target;
7594 break;
7596 case BUILT_IN_MEMSET:
7597 target = expand_builtin_memset (exp, target, mode);
7598 if (target)
7599 return target;
7600 break;
7602 case BUILT_IN_BZERO:
7603 target = expand_builtin_bzero (exp);
7604 if (target)
7605 return target;
7606 break;
7608 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7609 back to a BUILT_IN_STRCMP. Remember to delete the third parameter
7610 when changing it to a strcmp call. */
7611 case BUILT_IN_STRCMP_EQ:
7612 target = expand_builtin_memcmp (exp, target, true);
7613 if (target)
7614 return target;
7616 /* Change this call back to a BUILT_IN_STRCMP. */
7617 TREE_OPERAND (exp, 1)
7618 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7620 /* Delete the last parameter. */
7621 unsigned int i;
7622 vec<tree, va_gc> *arg_vec;
7623 vec_alloc (arg_vec, 2);
7624 for (i = 0; i < 2; i++)
7625 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7626 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7627 /* FALLTHROUGH */
7629 case BUILT_IN_STRCMP:
7630 target = expand_builtin_strcmp (exp, target);
7631 if (target)
7632 return target;
7633 break;
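/* Illustrative note, not part of the original source: earlier passes
   produce BUILT_IN_STRCMP_EQ for boolean uses such as

     if (strcmp (s, "ab") == 0) ...

   turning them into roughly __builtin_strcmp_eq (s, "ab", 3), where
   the added third argument covers the terminating NUL so the cheaper
   memcmp expansion can be tried first. */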
7635 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7636 back to a BUILT_IN_STRNCMP. */
7637 case BUILT_IN_STRNCMP_EQ:
7638 target = expand_builtin_memcmp (exp, target, true);
7639 if (target)
7640 return target;
7642 /* Change it back to a BUILT_IN_STRNCMP. */
7643 TREE_OPERAND (exp, 1)
7644 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7645 /* FALLTHROUGH */
7647 case BUILT_IN_STRNCMP:
7648 target = expand_builtin_strncmp (exp, target, mode);
7649 if (target)
7650 return target;
7651 break;
7653 case BUILT_IN_BCMP:
7654 case BUILT_IN_MEMCMP:
7655 case BUILT_IN_MEMCMP_EQ:
7656 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7657 if (target)
7658 return target;
7659 if (fcode == BUILT_IN_MEMCMP_EQ)
7661 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7662 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7664 break;
7666 case BUILT_IN_SETJMP:
7667 /* This should have been lowered to the builtins below. */
7668 gcc_unreachable ();
7670 case BUILT_IN_SETJMP_SETUP:
7671 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7672 and the receiver label. */
7673 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7675 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7676 VOIDmode, EXPAND_NORMAL);
7677 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7678 rtx_insn *label_r = label_rtx (label);
7680 /* This is copied from the handling of non-local gotos. */
7681 expand_builtin_setjmp_setup (buf_addr, label_r);
7682 nonlocal_goto_handler_labels
7683 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7684 nonlocal_goto_handler_labels);
7685 /* ??? Do not let expand_label treat us as such since we would
7686 not want to be both on the list of non-local labels and on
7687 the list of forced labels. */
7688 FORCED_LABEL (label) = 0;
7689 return const0_rtx;
7691 break;
7693 case BUILT_IN_SETJMP_RECEIVER:
7694 /* __builtin_setjmp_receiver is passed the receiver label. */
7695 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7697 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7698 rtx_insn *label_r = label_rtx (label);
7700 expand_builtin_setjmp_receiver (label_r);
7701 return const0_rtx;
7703 break;
7705 /* __builtin_longjmp is passed a pointer to an array of five words.
7706 It's similar to the C library longjmp function but works with
7707 __builtin_setjmp above. */
7708 case BUILT_IN_LONGJMP:
7709 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7711 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7712 VOIDmode, EXPAND_NORMAL);
7713 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7715 if (value != const1_rtx)
7717 error ("%<__builtin_longjmp%> second argument must be 1");
7718 return const0_rtx;
7721 expand_builtin_longjmp (buf_addr, value);
7722 return const0_rtx;
7724 break;
7726 case BUILT_IN_NONLOCAL_GOTO:
7727 target = expand_builtin_nonlocal_goto (exp);
7728 if (target)
7729 return target;
7730 break;
7732 /* This updates the setjmp buffer that is its argument with the value
7733 of the current stack pointer. */
7734 case BUILT_IN_UPDATE_SETJMP_BUF:
7735 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7737 rtx buf_addr
7738 = expand_normal (CALL_EXPR_ARG (exp, 0));
7740 expand_builtin_update_setjmp_buf (buf_addr);
7741 return const0_rtx;
7743 break;
7745 case BUILT_IN_TRAP:
7746 expand_builtin_trap ();
7747 return const0_rtx;
7749 case BUILT_IN_UNREACHABLE:
7750 expand_builtin_unreachable ();
7751 return const0_rtx;
7753 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7754 case BUILT_IN_SIGNBITD32:
7755 case BUILT_IN_SIGNBITD64:
7756 case BUILT_IN_SIGNBITD128:
7757 target = expand_builtin_signbit (exp, target);
7758 if (target)
7759 return target;
7760 break;
7762 /* Various hooks for the DWARF 2 __throw routine. */
7763 case BUILT_IN_UNWIND_INIT:
7764 expand_builtin_unwind_init ();
7765 return const0_rtx;
7766 case BUILT_IN_DWARF_CFA:
7767 return virtual_cfa_rtx;
7768 #ifdef DWARF2_UNWIND_INFO
7769 case BUILT_IN_DWARF_SP_COLUMN:
7770 return expand_builtin_dwarf_sp_column ();
7771 case BUILT_IN_INIT_DWARF_REG_SIZES:
7772 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7773 return const0_rtx;
7774 #endif
7775 case BUILT_IN_FROB_RETURN_ADDR:
7776 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7777 case BUILT_IN_EXTRACT_RETURN_ADDR:
7778 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7779 case BUILT_IN_EH_RETURN:
7780 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7781 CALL_EXPR_ARG (exp, 1));
7782 return const0_rtx;
7783 case BUILT_IN_EH_RETURN_DATA_REGNO:
7784 return expand_builtin_eh_return_data_regno (exp);
7785 case BUILT_IN_EXTEND_POINTER:
7786 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7787 case BUILT_IN_EH_POINTER:
7788 return expand_builtin_eh_pointer (exp);
7789 case BUILT_IN_EH_FILTER:
7790 return expand_builtin_eh_filter (exp);
7791 case BUILT_IN_EH_COPY_VALUES:
7792 return expand_builtin_eh_copy_values (exp);
7794 case BUILT_IN_VA_START:
7795 return expand_builtin_va_start (exp);
7796 case BUILT_IN_VA_END:
7797 return expand_builtin_va_end (exp);
7798 case BUILT_IN_VA_COPY:
7799 return expand_builtin_va_copy (exp);
7800 case BUILT_IN_EXPECT:
7801 return expand_builtin_expect (exp, target);
7802 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7803 return expand_builtin_expect_with_probability (exp, target);
7804 case BUILT_IN_ASSUME_ALIGNED:
7805 return expand_builtin_assume_aligned (exp, target);
7806 case BUILT_IN_PREFETCH:
7807 expand_builtin_prefetch (exp);
7808 return const0_rtx;
7810 case BUILT_IN_INIT_TRAMPOLINE:
7811 return expand_builtin_init_trampoline (exp, true);
7812 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7813 return expand_builtin_init_trampoline (exp, false);
7814 case BUILT_IN_ADJUST_TRAMPOLINE:
7815 return expand_builtin_adjust_trampoline (exp);
7817 case BUILT_IN_INIT_DESCRIPTOR:
7818 return expand_builtin_init_descriptor (exp);
7819 case BUILT_IN_ADJUST_DESCRIPTOR:
7820 return expand_builtin_adjust_descriptor (exp);
7822 case BUILT_IN_FORK:
7823 case BUILT_IN_EXECL:
7824 case BUILT_IN_EXECV:
7825 case BUILT_IN_EXECLP:
7826 case BUILT_IN_EXECLE:
7827 case BUILT_IN_EXECVP:
7828 case BUILT_IN_EXECVE:
7829 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7830 if (target)
7831 return target;
7832 break;
7834 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7835 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7836 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7837 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7838 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7839 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7840 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7841 if (target)
7842 return target;
7843 break;
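/* Illustrative note, not part of the original source: the _1.._16
   suffix encodes the access size, so a resolved call such as

     __sync_fetch_and_add (&x, 1)   // x is a 4-byte int

   reaches here as the _4 variant and expands as an atomic PLUS in
   the 4-byte integer mode, with AFTER false because the result is
   the pre-update value. */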
7845 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7846 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7847 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7848 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7849 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7850 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7851 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7852 if (target)
7853 return target;
7854 break;
7856 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7857 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7858 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7859 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7860 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7861 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7862 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7863 if (target)
7864 return target;
7865 break;
7867 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7868 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7869 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7870 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7871 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7872 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7873 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7874 if (target)
7875 return target;
7876 break;
7878 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7879 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7880 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7881 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7882 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7883 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7884 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7885 if (target)
7886 return target;
7887 break;
7889 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7890 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7891 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7892 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7893 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7894 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7895 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7896 if (target)
7897 return target;
7898 break;
7900 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7901 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7902 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7903 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7904 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7905 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7906 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7907 if (target)
7908 return target;
7909 break;
7911 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7912 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7913 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7914 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7915 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7916 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7917 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7918 if (target)
7919 return target;
7920 break;
7922 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7923 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7924 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7925 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7926 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7927 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7928 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7929 if (target)
7930 return target;
7931 break;
7933 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7934 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7935 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7936 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7937 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7938 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7939 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7940 if (target)
7941 return target;
7942 break;
7944 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7945 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7946 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7947 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7948 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7949 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7950 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7951 if (target)
7952 return target;
7953 break;
7955 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7956 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7957 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7958 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7959 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7960 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7961 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7962 if (target)
7963 return target;
7964 break;
7966 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7967 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7968 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7969 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7970 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7971 if (mode == VOIDmode)
7972 mode = TYPE_MODE (boolean_type_node);
7973 if (!target || !register_operand (target, mode))
7974 target = gen_reg_rtx (mode);
7976 mode = get_builtin_sync_mode
7977 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7978 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7979 if (target)
7980 return target;
7981 break;
7983 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7984 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7985 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7986 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7987 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7988 mode = get_builtin_sync_mode
7989 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7990 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7991 if (target)
7992 return target;
7993 break;
7995 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7996 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7997 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7998 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7999 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8001 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8002 if (target)
8003 return target;
8004 break;
8006 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8007 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8008 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8009 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8010 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8012 expand_builtin_sync_lock_release (mode, exp);
8013 return const0_rtx;
8015 case BUILT_IN_SYNC_SYNCHRONIZE:
8016 expand_builtin_sync_synchronize ();
8017 return const0_rtx;
8019 case BUILT_IN_ATOMIC_EXCHANGE_1:
8020 case BUILT_IN_ATOMIC_EXCHANGE_2:
8021 case BUILT_IN_ATOMIC_EXCHANGE_4:
8022 case BUILT_IN_ATOMIC_EXCHANGE_8:
8023 case BUILT_IN_ATOMIC_EXCHANGE_16:
8024 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8025 target = expand_builtin_atomic_exchange (mode, exp, target);
8026 if (target)
8027 return target;
8028 break;
8030 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8031 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8032 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8033 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8036 unsigned int nargs, z;
8037 vec<tree, va_gc> *vec;
8039 mode =
8040 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8041 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8042 if (target)
8043 return target;
8045 /* If this is turned into an external library call, the weak parameter
8046 must be dropped to match the expected parameter list. */
8047 nargs = call_expr_nargs (exp);
8048 vec_alloc (vec, nargs - 1);
8049 for (z = 0; z < 3; z++)
8050 vec->quick_push (CALL_EXPR_ARG (exp, z));
8051 /* Skip the boolean weak parameter. */
8052 for (z = 4; z < 6; z++)
8053 vec->quick_push (CALL_EXPR_ARG (exp, z));
8054 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8055 break;
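/* Illustrative note, not part of the original source: the rebuilt
   call matches the libatomic entry point, which for the 4-byte
   variant is roughly

     bool __atomic_compare_exchange_4 (void *ptr, unsigned *expected,
                                       unsigned desired, int success,
                                       int failure);

   i.e. the builtin's arguments minus the boolean WEAK flag. */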
8058 case BUILT_IN_ATOMIC_LOAD_1:
8059 case BUILT_IN_ATOMIC_LOAD_2:
8060 case BUILT_IN_ATOMIC_LOAD_4:
8061 case BUILT_IN_ATOMIC_LOAD_8:
8062 case BUILT_IN_ATOMIC_LOAD_16:
8063 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8064 target = expand_builtin_atomic_load (mode, exp, target);
8065 if (target)
8066 return target;
8067 break;
8069 case BUILT_IN_ATOMIC_STORE_1:
8070 case BUILT_IN_ATOMIC_STORE_2:
8071 case BUILT_IN_ATOMIC_STORE_4:
8072 case BUILT_IN_ATOMIC_STORE_8:
8073 case BUILT_IN_ATOMIC_STORE_16:
8074 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8075 target = expand_builtin_atomic_store (mode, exp);
8076 if (target)
8077 return const0_rtx;
8078 break;
8080 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8081 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8082 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8083 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8084 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8086 enum built_in_function lib;
8087 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8088 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8089 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8090 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8091 ignore, lib);
8092 if (target)
8093 return target;
8094 break;
8096 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8097 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8098 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8099 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8100 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8102 enum built_in_function lib;
8103 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8104 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8105 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8106 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8107 ignore, lib);
8108 if (target)
8109 return target;
8110 break;
8112 case BUILT_IN_ATOMIC_AND_FETCH_1:
8113 case BUILT_IN_ATOMIC_AND_FETCH_2:
8114 case BUILT_IN_ATOMIC_AND_FETCH_4:
8115 case BUILT_IN_ATOMIC_AND_FETCH_8:
8116 case BUILT_IN_ATOMIC_AND_FETCH_16:
8118 enum built_in_function lib;
8119 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8120 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8121 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8122 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8123 ignore, lib);
8124 if (target)
8125 return target;
8126 break;
8128 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8129 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8130 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8131 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8132 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8134 enum built_in_function lib;
8135 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8136 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8137 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8138 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8139 ignore, lib);
8140 if (target)
8141 return target;
8142 break;
8144 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8145 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8146 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8147 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8148 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8150 enum built_in_function lib;
8151 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8152 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8153 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8154 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8155 ignore, lib);
8156 if (target)
8157 return target;
8158 break;
8160 case BUILT_IN_ATOMIC_OR_FETCH_1:
8161 case BUILT_IN_ATOMIC_OR_FETCH_2:
8162 case BUILT_IN_ATOMIC_OR_FETCH_4:
8163 case BUILT_IN_ATOMIC_OR_FETCH_8:
8164 case BUILT_IN_ATOMIC_OR_FETCH_16:
8166 enum built_in_function lib;
8167 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8168 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8169 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8170 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8171 ignore, lib);
8172 if (target)
8173 return target;
8174 break;
8176 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8177 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8178 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8179 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8180 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8181 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8182 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8183 ignore, BUILT_IN_NONE);
8184 if (target)
8185 return target;
8186 break;
8188 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8189 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8190 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8191 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8192 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8193 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8194 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8195 ignore, BUILT_IN_NONE);
8196 if (target)
8197 return target;
8198 break;
8200 case BUILT_IN_ATOMIC_FETCH_AND_1:
8201 case BUILT_IN_ATOMIC_FETCH_AND_2:
8202 case BUILT_IN_ATOMIC_FETCH_AND_4:
8203 case BUILT_IN_ATOMIC_FETCH_AND_8:
8204 case BUILT_IN_ATOMIC_FETCH_AND_16:
8205 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8206 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8207 ignore, BUILT_IN_NONE);
8208 if (target)
8209 return target;
8210 break;
8212 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8213 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8214 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8215 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8216 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8217 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8218 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8219 ignore, BUILT_IN_NONE);
8220 if (target)
8221 return target;
8222 break;
8224 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8225 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8226 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8227 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8228 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8229 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8230 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8231 ignore, BUILT_IN_NONE);
8232 if (target)
8233 return target;
8234 break;
8236 case BUILT_IN_ATOMIC_FETCH_OR_1:
8237 case BUILT_IN_ATOMIC_FETCH_OR_2:
8238 case BUILT_IN_ATOMIC_FETCH_OR_4:
8239 case BUILT_IN_ATOMIC_FETCH_OR_8:
8240 case BUILT_IN_ATOMIC_FETCH_OR_16:
8241 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8242 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8243 ignore, BUILT_IN_NONE);
8244 if (target)
8245 return target;
8246 break;
8248 case BUILT_IN_ATOMIC_TEST_AND_SET:
8249 return expand_builtin_atomic_test_and_set (exp, target);
8251 case BUILT_IN_ATOMIC_CLEAR:
8252 return expand_builtin_atomic_clear (exp);
8254 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8255 return expand_builtin_atomic_always_lock_free (exp);
8257 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8258 target = expand_builtin_atomic_is_lock_free (exp);
8259 if (target)
8260 return target;
8261 break;
8263 case BUILT_IN_ATOMIC_THREAD_FENCE:
8264 expand_builtin_atomic_thread_fence (exp);
8265 return const0_rtx;
8267 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8268 expand_builtin_atomic_signal_fence (exp);
8269 return const0_rtx;
8271 case BUILT_IN_OBJECT_SIZE:
8272 return expand_builtin_object_size (exp);
8274 case BUILT_IN_MEMCPY_CHK:
8275 case BUILT_IN_MEMPCPY_CHK:
8276 case BUILT_IN_MEMMOVE_CHK:
8277 case BUILT_IN_MEMSET_CHK:
8278 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8279 if (target)
8280 return target;
8281 break;
8283 case BUILT_IN_STRCPY_CHK:
8284 case BUILT_IN_STPCPY_CHK:
8285 case BUILT_IN_STRNCPY_CHK:
8286 case BUILT_IN_STPNCPY_CHK:
8287 case BUILT_IN_STRCAT_CHK:
8288 case BUILT_IN_STRNCAT_CHK:
8289 case BUILT_IN_SNPRINTF_CHK:
8290 case BUILT_IN_VSNPRINTF_CHK:
8291 maybe_emit_chk_warning (exp, fcode);
8292 break;
8294 case BUILT_IN_SPRINTF_CHK:
8295 case BUILT_IN_VSPRINTF_CHK:
8296 maybe_emit_sprintf_chk_warning (exp, fcode);
8297 break;
8299 case BUILT_IN_FREE:
8300 if (warn_free_nonheap_object)
8301 maybe_emit_free_warning (exp);
8302 break;
8304 case BUILT_IN_THREAD_POINTER:
8305 return expand_builtin_thread_pointer (exp, target);
8307 case BUILT_IN_SET_THREAD_POINTER:
8308 expand_builtin_set_thread_pointer (exp);
8309 return const0_rtx;
8311 case BUILT_IN_ACC_ON_DEVICE:
8312 /* Do a library call if we failed to expand the builtin when
8313 folding. */
8314 break;
8316 case BUILT_IN_GOACC_PARLEVEL_ID:
8317 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8318 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8320 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8321 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8323 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8324 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8325 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8326 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8327 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8328 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8329 return expand_speculation_safe_value (mode, exp, target, ignore);
8331 default: /* Just do a library call if the builtin is unknown. */
8332 break;
8335 /* The switch statement above can drop through to cause the function
8336 to be called normally. */
8337 return expand_call (exp, target, ignore);
8340 /* Determine whether a tree node represents a call to a built-in
8341 function. If the tree T is a call to a built-in function with
8342 the right number of arguments of the appropriate types, return
8343 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8344 Otherwise the return value is END_BUILTINS. */
8346 enum built_in_function
8347 builtin_mathfn_code (const_tree t)
8349 const_tree fndecl, arg, parmlist;
8350 const_tree argtype, parmtype;
8351 const_call_expr_arg_iterator iter;
8353 if (TREE_CODE (t) != CALL_EXPR)
8354 return END_BUILTINS;
8356 fndecl = get_callee_fndecl (t);
8357 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8358 return END_BUILTINS;
8360 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8361 init_const_call_expr_arg_iterator (t, &iter);
8362 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8364 /* If a function doesn't take a variable number of arguments,
8365 the last element in the list will have type `void'. */
8366 parmtype = TREE_VALUE (parmlist);
8367 if (VOID_TYPE_P (parmtype))
8369 if (more_const_call_expr_args_p (&iter))
8370 return END_BUILTINS;
8371 return DECL_FUNCTION_CODE (fndecl);
8374 if (! more_const_call_expr_args_p (&iter))
8375 return END_BUILTINS;
8377 arg = next_const_call_expr_arg (&iter);
8378 argtype = TREE_TYPE (arg);
8380 if (SCALAR_FLOAT_TYPE_P (parmtype))
8382 if (! SCALAR_FLOAT_TYPE_P (argtype))
8383 return END_BUILTINS;
8385 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8387 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8388 return END_BUILTINS;
8390 else if (POINTER_TYPE_P (parmtype))
8392 if (! POINTER_TYPE_P (argtype))
8393 return END_BUILTINS;
8395 else if (INTEGRAL_TYPE_P (parmtype))
8397 if (! INTEGRAL_TYPE_P (argtype))
8398 return END_BUILTINS;
8400 else
8401 return END_BUILTINS;
8404 /* Variable-length argument list. */
8405 return DECL_FUNCTION_CODE (fndecl);
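/* Illustrative examples, not part of the original source: for a call
   tree built from sqrt (2.0) this returns BUILT_IN_SQRT, while for a
   mismatched call such as sqrt ("x") the pointer-vs-float check
   yields END_BUILTINS. */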
8408 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8409 evaluate to a constant. */
8411 static tree
8412 fold_builtin_constant_p (tree arg)
8414 /* We return 1 for a numeric type that's known to be a constant
8415 value at compile-time or for an aggregate type that's a
8416 literal constant. */
8417 STRIP_NOPS (arg);
8419 /* If we know this is a constant, return the constant one. */
8420 if (CONSTANT_CLASS_P (arg)
8421 || (TREE_CODE (arg) == CONSTRUCTOR
8422 && TREE_CONSTANT (arg)))
8423 return integer_one_node;
8424 if (TREE_CODE (arg) == ADDR_EXPR)
8426 tree op = TREE_OPERAND (arg, 0);
8427 if (TREE_CODE (op) == STRING_CST
8428 || (TREE_CODE (op) == ARRAY_REF
8429 && integer_zerop (TREE_OPERAND (op, 1))
8430 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8431 return integer_one_node;
8434 /* If this expression has side effects, show we don't know it to be a
8435 constant. Likewise if it's a pointer or aggregate type, since in
8436 those cases we only want literals, as those are only optimized
8437 when generating RTL, not later.
8438 And finally, if we are compiling an initializer, not code, we
8439 need to return a definite result now; there's not going to be any
8440 more optimization done. */
8441 if (TREE_SIDE_EFFECTS (arg)
8442 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8443 || POINTER_TYPE_P (TREE_TYPE (arg))
8444 || cfun == 0
8445 || folding_initializer
8446 || force_folding_builtin_constant_p)
8447 return integer_zero_node;
8449 return NULL_TREE;
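/* Illustrative examples, not part of the original source:

     __builtin_constant_p (42)      -> folds to 1 here
     __builtin_constant_p ("abc")   -> folds to 1 here
     __builtin_constant_p (x + y)   -> NULL_TREE: deferred, and forced
                                       to 0 late if still unresolved

   (the last case returns 0 immediately when there is no function
   context or when folding an initializer). */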
8452 /* Create builtin_expect or builtin_expect_with_probability
8453 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8454 The Fortran FE can also produce builtin_expect with PREDICTOR as its
8455 third argument; builtin_expect_with_probability instead uses its third
8456 argument as the PROBABILITY value. */
8458 static tree
8459 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8460 tree predictor, tree probability)
8462 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8464 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8465 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8466 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8467 ret_type = TREE_TYPE (TREE_TYPE (fn));
8468 pred_type = TREE_VALUE (arg_types);
8469 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8471 pred = fold_convert_loc (loc, pred_type, pred);
8472 expected = fold_convert_loc (loc, expected_type, expected);
8474 if (probability)
8475 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8476 else
8477 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8478 predictor);
8480 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8481 build_int_cst (ret_type, 0));
8484 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8485 NULL_TREE if no simplification is possible. */
8487 tree
8488 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8489 tree arg3)
8491 tree inner, fndecl, inner_arg0;
8492 enum tree_code code;
8494 /* Distribute the expected value over short-circuiting operators.
8495 See through the cast from truthvalue_type_node to long. */
8496 inner_arg0 = arg0;
8497 while (CONVERT_EXPR_P (inner_arg0)
8498 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8499 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8500 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8502 /* If this is a builtin_expect within a builtin_expect keep the
8503 inner one. See through a comparison against a constant. It
8504 might have been added to create a truthvalue. */
8505 inner = inner_arg0;
8507 if (COMPARISON_CLASS_P (inner)
8508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8509 inner = TREE_OPERAND (inner, 0);
8511 if (TREE_CODE (inner) == CALL_EXPR
8512 && (fndecl = get_callee_fndecl (inner))
8513 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8514 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8515 return arg0;
8517 inner = inner_arg0;
8518 code = TREE_CODE (inner);
8519 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8521 tree op0 = TREE_OPERAND (inner, 0);
8522 tree op1 = TREE_OPERAND (inner, 1);
8523 arg1 = save_expr (arg1);
8525 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8526 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8527 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8529 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8532 /* If the argument isn't invariant then there's nothing else we can do. */
8533 if (!TREE_CONSTANT (inner_arg0))
8534 return NULL_TREE;
8536 /* If we expect that a comparison against the argument will fold to
8537 a constant return the constant. In practice, this means a true
8538 constant or the address of a non-weak symbol. */
8539 inner = inner_arg0;
8540 STRIP_NOPS (inner);
8541 if (TREE_CODE (inner) == ADDR_EXPR)
8545 inner = TREE_OPERAND (inner, 0);
8547 while (TREE_CODE (inner) == COMPONENT_REF
8548 || TREE_CODE (inner) == ARRAY_REF);
8549 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8550 return NULL_TREE;
8553 /* Otherwise, ARG0 already has the proper type for the return value. */
8554 return arg0;
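/* Illustrative example, not part of the original source: the
   distribution step above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the original type, so each short-circuit arm
   carries its own hint. */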
8557 /* Fold a call to __builtin_classify_type with argument ARG. */
8559 static tree
8560 fold_builtin_classify_type (tree arg)
8562 if (arg == 0)
8563 return build_int_cst (integer_type_node, no_type_class);
8565 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8568 /* Fold a call to __builtin_strlen with argument ARG. */
8570 static tree
8571 fold_builtin_strlen (location_t loc, tree type, tree arg)
8573 if (!validate_arg (arg, POINTER_TYPE))
8574 return NULL_TREE;
8575 else
8577 c_strlen_data lendata = { };
8578 tree len = c_strlen (arg, 0, &lendata);
8580 if (len)
8581 return fold_convert_loc (loc, type, len);
8583 if (!lendata.decl)
8584 c_strlen (arg, 1, &lendata);
8586 if (lendata.decl)
8588 if (EXPR_HAS_LOCATION (arg))
8589 loc = EXPR_LOCATION (arg);
8590 else if (loc == UNKNOWN_LOCATION)
8591 loc = input_location;
8592 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8595 return NULL_TREE;
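/* E.g. strlen ("hello") folds to 5 via c_strlen, while strlen (buf)
   for a non-constant BUF is left for later passes (illustrative, not
   from the original source). */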
8599 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8601 static tree
8602 fold_builtin_inf (location_t loc, tree type, int warn)
8604 REAL_VALUE_TYPE real;
8606 /* __builtin_inff is intended to be usable to define INFINITY on all
8607 targets. If an infinity is not available, INFINITY expands "to a
8608 positive constant of type float that overflows at translation
8609 time", footnote "In this case, using INFINITY will violate the
8610 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8611 Thus we pedwarn to ensure this constraint violation is
8612 diagnosed. */
8613 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8614 pedwarn (loc, 0, "target format does not support infinity");
8616 real_inf (&real);
8617 return build_real (type, real);
8620 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8621 NULL_TREE if no simplification can be made. */
8623 static tree
8624 fold_builtin_sincos (location_t loc,
8625 tree arg0, tree arg1, tree arg2)
8627 tree type;
8628 tree fndecl, call = NULL_TREE;
8630 if (!validate_arg (arg0, REAL_TYPE)
8631 || !validate_arg (arg1, POINTER_TYPE)
8632 || !validate_arg (arg2, POINTER_TYPE))
8633 return NULL_TREE;
8635 type = TREE_TYPE (arg0);
8637 /* Find the cexpi variant of the builtin for this type; sincos is lowered via cexpi. */
8638 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8639 if (fn == END_BUILTINS)
8640 return NULL_TREE;
8642 /* Calculate the result when the argument is a constant. */
8643 if (TREE_CODE (arg0) == REAL_CST)
8645 tree complex_type = build_complex_type (type);
8646 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8648 if (!call)
8650 if (!targetm.libc_has_function (function_c99_math_complex)
8651 || !builtin_decl_implicit_p (fn))
8652 return NULL_TREE;
8653 fndecl = builtin_decl_explicit (fn);
8654 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8655 call = builtin_save_expr (call);
8658 tree ptype = build_pointer_type (type);
8659 arg1 = fold_convert (ptype, arg1);
8660 arg2 = fold_convert (ptype, arg2);
8661 return build2 (COMPOUND_EXPR, void_type_node,
8662 build2 (MODIFY_EXPR, void_type_node,
8663 build_fold_indirect_ref_loc (loc, arg1),
8664 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8665 build2 (MODIFY_EXPR, void_type_node,
8666 build_fold_indirect_ref_loc (loc, arg2),
8667 fold_build1_loc (loc, REALPART_EXPR, type, call)));
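/* Illustrative result, not part of the original source: for
   sincos (x, &s, &c) the folded form is roughly

     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   and when X is a real constant the cexpi call itself is folded to a
   complex constant. */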
8670 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8671 Return NULL_TREE if no simplification can be made. */
8673 static tree
8674 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8676 if (!validate_arg (arg1, POINTER_TYPE)
8677 || !validate_arg (arg2, POINTER_TYPE)
8678 || !validate_arg (len, INTEGER_TYPE))
8679 return NULL_TREE;
8681 /* If the LEN parameter is zero, return zero. */
8682 if (integer_zerop (len))
8683 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8684 arg1, arg2);
8686 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8687 if (operand_equal_p (arg1, arg2, 0))
8688 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8690 /* If the LEN parameter is one, return an expression corresponding to
8691 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8692 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8694 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8695 tree cst_uchar_ptr_node
8696 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8698 tree ind1
8699 = fold_convert_loc (loc, integer_type_node,
8700 build1 (INDIRECT_REF, cst_uchar_node,
8701 fold_convert_loc (loc,
8702 cst_uchar_ptr_node,
8703 arg1)));
8704 tree ind2
8705 = fold_convert_loc (loc, integer_type_node,
8706 build1 (INDIRECT_REF, cst_uchar_node,
8707 fold_convert_loc (loc,
8708 cst_uchar_ptr_node,
8709 arg2)));
8710 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8713 return NULL_TREE;
8716 /* Fold a call to builtin isascii with argument ARG. */
8718 static tree
8719 fold_builtin_isascii (location_t loc, tree arg)
8721 if (!validate_arg (arg, INTEGER_TYPE))
8722 return NULL_TREE;
8723 else
8725 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8726 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8727 build_int_cst (integer_type_node,
8728 ~ (unsigned HOST_WIDE_INT) 0x7f));
8729 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8730 arg, integer_zero_node);
8734 /* Fold a call to builtin toascii with argument ARG. */
8736 static tree
8737 fold_builtin_toascii (location_t loc, tree arg)
8739 if (!validate_arg (arg, INTEGER_TYPE))
8740 return NULL_TREE;
8742 /* Transform toascii(c) -> (c & 0x7f). */
8743 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8744 build_int_cst (integer_type_node, 0x7f));
8747 /* Fold a call to builtin isdigit with argument ARG. */
8749 static tree
8750 fold_builtin_isdigit (location_t loc, tree arg)
8752 if (!validate_arg (arg, INTEGER_TYPE))
8753 return NULL_TREE;
8754 else
8756 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8757 /* According to the C standard, isdigit is unaffected by locale.
8758 However, it definitely is affected by the target character set. */
8759 unsigned HOST_WIDE_INT target_digit0
8760 = lang_hooks.to_target_charset ('0');
8762 if (target_digit0 == 0)
8763 return NULL_TREE;
8765 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8766 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8767 build_int_cst (unsigned_type_node, target_digit0));
8768 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8769 build_int_cst (unsigned_type_node, 9));
8773 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8775 static tree
8776 fold_builtin_fabs (location_t loc, tree arg, tree type)
8778 if (!validate_arg (arg, REAL_TYPE))
8779 return NULL_TREE;
8781 arg = fold_convert_loc (loc, type, arg);
8782 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8785 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8787 static tree
8788 fold_builtin_abs (location_t loc, tree arg, tree type)
8790 if (!validate_arg (arg, INTEGER_TYPE))
8791 return NULL_TREE;
8793 arg = fold_convert_loc (loc, type, arg);
8794 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8797 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8799 static tree
8800 fold_builtin_carg (location_t loc, tree arg, tree type)
8802 if (validate_arg (arg, COMPLEX_TYPE)
8803 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8805 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8807 if (atan2_fn)
8809 tree new_arg = builtin_save_expr (arg);
8810 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8811 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8812 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8816 return NULL_TREE;
8819 /* Fold a call to builtin frexp; we can assume the base is 2. */
8821 static tree
8822 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8824 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8825 return NULL_TREE;
8827 STRIP_NOPS (arg0);
8829 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8830 return NULL_TREE;
8832 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8834 /* Proceed if a valid pointer type was passed in. */
8835 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8837 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8838 tree frac, exp;
8840 switch (value->cl)
8842 case rvc_zero:
8843 /* For +-0, return (*exp = 0, +-0). */
8844 exp = integer_zero_node;
8845 frac = arg0;
8846 break;
8847 case rvc_nan:
8848 case rvc_inf:
8849 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8850 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8851 case rvc_normal:
8853 /* Since the frexp function always expects base 2, and in
8854 GCC normalized significands are already in the range
8855 [0.5, 1.0), we have exactly what frexp wants. */
8856 REAL_VALUE_TYPE frac_rvt = *value;
8857 SET_REAL_EXP (&frac_rvt, 0);
8858 frac = build_real (rettype, frac_rvt);
8859 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8861 break;
8862 default:
8863 gcc_unreachable ();
8866 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8867 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8868 TREE_SIDE_EFFECTS (arg1) = 1;
8869 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8872 return NULL_TREE;
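/* Worked example (illustrative, not from GCC): frexp (6.0, &e) has the
   normalized significand 0.75 and exponent 3, because 6.0 == 0.75 * 2**3
   with 0.75 in [0.5, 1.0).  The fold therefore builds the equivalent of

     (*e = 3, 0.75)

   a COMPOUND_EXPR whose value is the fraction.  */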
8875 /* Fold a call to builtin modf. */
8877 static tree
8878 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8880 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8881 return NULL_TREE;
8883 STRIP_NOPS (arg0);
8885 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8886 return NULL_TREE;
8888 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8890 /* Proceed if a valid pointer type was passed in. */
8891 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8893 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8894 REAL_VALUE_TYPE trunc, frac;
8896 switch (value->cl)
8898 case rvc_nan:
8899 case rvc_zero:
8900 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8901 trunc = frac = *value;
8902 break;
8903 case rvc_inf:
8904 /* For +-Inf, return (*arg1 = arg0, +-0). */
8905 frac = dconst0;
8906 frac.sign = value->sign;
8907 trunc = *value;
8908 break;
8909 case rvc_normal:
8910 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8911 real_trunc (&trunc, VOIDmode, value);
8912 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8913 /* If the original number was negative and already
8914 integral, then the fractional part is -0.0. */
8915 if (value->sign && frac.cl == rvc_zero)
8916 frac.sign = value->sign;
8917 break;
8920 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8921 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8922 build_real (rettype, trunc));
8923 TREE_SIDE_EFFECTS (arg1) = 1;
8924 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8925 build_real (rettype, frac));
8928 return NULL_TREE;
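/* Worked examples (illustrative, not from GCC): modf (-3.5, &ip) folds
   to the equivalent of (*ip = -3.0, -0.5), and for a negative value that
   is already integral, e.g. modf (-2.0, &ip), the sign is preserved in
   the fractional part: (*ip = -2.0, -0.0).  */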
8931 /* Given a location LOC, an interclass builtin function decl FNDECL
8932 and its single argument ARG, return a folded expression computing
8933 the same, or NULL_TREE if we either couldn't or didn't want to fold
8934 (the latter happens if there's an RTL instruction available). */
8936 static tree
8937 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8939 machine_mode mode;
8941 if (!validate_arg (arg, REAL_TYPE))
8942 return NULL_TREE;
8944 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8945 return NULL_TREE;
8947 mode = TYPE_MODE (TREE_TYPE (arg));
8949 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8951 /* If there is no optab, try generic code. */
8952 switch (DECL_FUNCTION_CODE (fndecl))
8954 tree result;
8956 CASE_FLT_FN (BUILT_IN_ISINF):
8958 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8959 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8960 tree type = TREE_TYPE (arg);
8961 REAL_VALUE_TYPE r;
8962 char buf[128];
8964 if (is_ibm_extended)
8966 /* NaN and Inf are encoded in the high-order double value
8967 only. The low-order value is not significant. */
8968 type = double_type_node;
8969 mode = DFmode;
8970 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8972 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8973 real_from_string (&r, buf);
8974 result = build_call_expr (isgr_fn, 2,
8975 fold_build1_loc (loc, ABS_EXPR, type, arg),
8976 build_real (type, r));
8977 return result;
8979 CASE_FLT_FN (BUILT_IN_FINITE):
8980 case BUILT_IN_ISFINITE:
8982 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8983 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8984 tree type = TREE_TYPE (arg);
8985 REAL_VALUE_TYPE r;
8986 char buf[128];
8988 if (is_ibm_extended)
8990 /* NaN and Inf are encoded in the high-order double value
8991 only. The low-order value is not significant. */
8992 type = double_type_node;
8993 mode = DFmode;
8994 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8996 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8997 real_from_string (&r, buf);
8998 result = build_call_expr (isle_fn, 2,
8999 fold_build1_loc (loc, ABS_EXPR, type, arg),
9000 build_real (type, r));
9001 /*result = fold_build2_loc (loc, UNGT_EXPR,
9002 TREE_TYPE (TREE_TYPE (fndecl)),
9003 fold_build1_loc (loc, ABS_EXPR, type, arg),
9004 build_real (type, r));
9005 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9006 TREE_TYPE (TREE_TYPE (fndecl)),
9007 result);*/
9008 return result;
9010 case BUILT_IN_ISNORMAL:
9012 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9013 islessequal(fabs(x),DBL_MAX). */
9014 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9015 tree type = TREE_TYPE (arg);
9016 tree orig_arg, max_exp, min_exp;
9017 machine_mode orig_mode = mode;
9018 REAL_VALUE_TYPE rmax, rmin;
9019 char buf[128];
9021 orig_arg = arg = builtin_save_expr (arg);
9022 if (is_ibm_extended)
9024 /* Use double to test the normal range of IBM extended
9025 precision. Emin for IBM extended precision is
9026 different to emin for IEEE double, being 53 higher
9027 since the low double exponent is at least 53 lower
9028 than the high double exponent. */
9029 type = double_type_node;
9030 mode = DFmode;
9031 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9033 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9035 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9036 real_from_string (&rmax, buf);
9037 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9038 real_from_string (&rmin, buf);
9039 max_exp = build_real (type, rmax);
9040 min_exp = build_real (type, rmin);
9042 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9043 if (is_ibm_extended)
9045 /* Testing the high end of the range is done just using
9046 the high double, using the same test as isfinite().
9047 For the subnormal end of the range we first test the
9048 high double, then if its magnitude is equal to the
9049 limit of 0x1p-969, we test whether the low double is
9050 non-zero and opposite sign to the high double. */
9051 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9052 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9053 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9054 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9055 arg, min_exp);
9056 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9057 complex_double_type_node, orig_arg);
9058 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9059 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9060 tree zero = build_real (type, dconst0);
9061 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9062 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9063 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9064 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9065 fold_build3 (COND_EXPR,
9066 integer_type_node,
9067 hilt, logt, lolt));
9068 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9069 eq_min, ok_lo);
9070 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9071 gt_min, eq_min);
9073 else
9075 tree const isge_fn
9076 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9077 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9079 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9080 max_exp, min_exp);
9081 return result;
9083 default:
9084 break;
9087 return NULL_TREE;
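/* Illustrative sketch (not part of GCC): for a plain IEEE double the
   generic expansions above amount to

     #include <math.h>
     #include <float.h>

     int folded_isinf (double x)    { return isgreater (fabs (x), DBL_MAX); }
     int folded_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }

   Both depend on the quiet comparisons returning false for NaN, so no
   separate NaN test is needed.  */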
9090 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9091 ARG is the argument for the call. */
9093 static tree
9094 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9096 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9098 if (!validate_arg (arg, REAL_TYPE))
9099 return NULL_TREE;
9101 switch (builtin_index)
9103 case BUILT_IN_ISINF:
9104 if (!HONOR_INFINITIES (arg))
9105 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9107 return NULL_TREE;
9109 case BUILT_IN_ISINF_SIGN:
9111 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9112 /* In a boolean context, GCC will fold the inner COND_EXPR to
9113 1. So e.g. "if (isinf_sign(x))" would be folded to just
9114 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9115 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9116 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9117 tree tmp = NULL_TREE;
9119 arg = builtin_save_expr (arg);
9121 if (signbit_fn && isinf_fn)
9123 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9124 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9126 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9127 signbit_call, integer_zero_node);
9128 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9129 isinf_call, integer_zero_node);
9131 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9132 integer_minus_one_node, integer_one_node);
9133 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9134 isinf_call, tmp,
9135 integer_zero_node);
9138 return tmp;
9141 case BUILT_IN_ISFINITE:
9142 if (!HONOR_NANS (arg)
9143 && !HONOR_INFINITIES (arg))
9144 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9146 return NULL_TREE;
9148 case BUILT_IN_ISNAN:
9149 if (!HONOR_NANS (arg))
9150 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9153 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9154 if (is_ibm_extended)
9156 /* NaN and Inf are encoded in the high-order double value
9157 only. The low-order value is not significant. */
9158 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9161 arg = builtin_save_expr (arg);
9162 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9164 default:
9165 gcc_unreachable ();
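/* Illustrative sketch (not part of GCC): the isinf_sign fold above is
   equivalent at the source level to

     int folded_isinf_sign (double x)
     {
       return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
     }

   while the isnan fallback is a self-comparison: UNORDERED_EXPR on the
   argument with itself is true exactly when the argument is a NaN.  */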
9169 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9170 This builtin will generate code to return the appropriate floating
9171 point classification depending on the value of the floating point
9172 number passed in. The possible return values must be supplied as
9173 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9174 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9175 one floating point argument which is "type generic". */
9177 static tree
9178 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9180 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9181 arg, type, res, tmp;
9182 machine_mode mode;
9183 REAL_VALUE_TYPE r;
9184 char buf[128];
9186 /* Verify the required arguments in the original call. */
9187 if (nargs != 6
9188 || !validate_arg (args[0], INTEGER_TYPE)
9189 || !validate_arg (args[1], INTEGER_TYPE)
9190 || !validate_arg (args[2], INTEGER_TYPE)
9191 || !validate_arg (args[3], INTEGER_TYPE)
9192 || !validate_arg (args[4], INTEGER_TYPE)
9193 || !validate_arg (args[5], REAL_TYPE))
9194 return NULL_TREE;
9196 fp_nan = args[0];
9197 fp_infinite = args[1];
9198 fp_normal = args[2];
9199 fp_subnormal = args[3];
9200 fp_zero = args[4];
9201 arg = args[5];
9202 type = TREE_TYPE (arg);
9203 mode = TYPE_MODE (type);
9204 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9206 /* fpclassify(x) ->
9207 isnan(x) ? FP_NAN :
9208 (fabs(x) == Inf ? FP_INFINITE :
9209 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9210 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9212 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9213 build_real (type, dconst0));
9214 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9215 tmp, fp_zero, fp_subnormal);
9217 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9218 real_from_string (&r, buf);
9219 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9220 arg, build_real (type, r));
9221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9223 if (HONOR_INFINITIES (mode))
9225 real_inf (&r);
9226 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9227 build_real (type, r));
9228 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9229 fp_infinite, res);
9232 if (HONOR_NANS (mode))
9234 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9235 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9238 return res;
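/* Illustrative sketch (not part of GCC): for IEEE double the nest of
   COND_EXPRs built above corresponds to

     #include <math.h>
     #include <float.h>

     int folded_fpclassify (double x)
     {
       double a = fabs (x);
       return isnan (x)     ? FP_NAN
            : a == INFINITY ? FP_INFINITE
            : a >= DBL_MIN  ? FP_NORMAL
            : a == 0.0      ? FP_ZERO
                            : FP_SUBNORMAL;
     }

   built innermost first, starting with the zero/subnormal test.  */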
9241 /* Fold a call to an unordered comparison function such as
9242 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9243 being called and ARG0 and ARG1 are the arguments for the call.
9244 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9245 the opposite of the desired result. UNORDERED_CODE is used
9246 for modes that can hold NaNs and ORDERED_CODE is used for
9247 the rest. */
9249 static tree
9250 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9251 enum tree_code unordered_code,
9252 enum tree_code ordered_code)
9254 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9255 enum tree_code code;
9256 tree type0, type1;
9257 enum tree_code code0, code1;
9258 tree cmp_type = NULL_TREE;
9260 type0 = TREE_TYPE (arg0);
9261 type1 = TREE_TYPE (arg1);
9263 code0 = TREE_CODE (type0);
9264 code1 = TREE_CODE (type1);
9266 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9267 /* Choose the wider of two real types. */
9268 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9269 ? type0 : type1;
9270 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9271 cmp_type = type0;
9272 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9273 cmp_type = type1;
9275 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9276 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9278 if (unordered_code == UNORDERED_EXPR)
9280 if (!HONOR_NANS (arg0))
9281 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9282 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9285 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9286 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9287 fold_build2_loc (loc, code, type, arg0, arg1));
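/* Illustrative sketch (not part of GCC, ignoring exception-flag
   subtleties): __builtin_isgreater (x, y) becomes !(x UNLE y), i.e.

     int folded_isgreater (double x, double y)
     {
       // UNLE is "unordered or less-or-equal"; its negation is
       // "ordered and greater", exactly what isgreater specifies.
       return !(isunordered (x, y) || x <= y);
     }

   When the mode cannot hold NaNs, the plain LE_EXPR is negated
   instead.  */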
9290 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9291 arithmetic if it can never overflow, or into internal functions that
9292 return both the result of the arithmetic and an overflow flag in
9293 a complex integer result, or some other check for overflow.
9294 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9295 checking part of that. */
9297 static tree
9298 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9299 tree arg0, tree arg1, tree arg2)
9301 enum internal_fn ifn = IFN_LAST;
9302 /* The code of the expression corresponding to the type-generic
9303 built-in, or ERROR_MARK for the type-specific ones. */
9304 enum tree_code opcode = ERROR_MARK;
9305 bool ovf_only = false;
9307 switch (fcode)
9309 case BUILT_IN_ADD_OVERFLOW_P:
9310 ovf_only = true;
9311 /* FALLTHRU */
9312 case BUILT_IN_ADD_OVERFLOW:
9313 opcode = PLUS_EXPR;
9314 /* FALLTHRU */
9315 case BUILT_IN_SADD_OVERFLOW:
9316 case BUILT_IN_SADDL_OVERFLOW:
9317 case BUILT_IN_SADDLL_OVERFLOW:
9318 case BUILT_IN_UADD_OVERFLOW:
9319 case BUILT_IN_UADDL_OVERFLOW:
9320 case BUILT_IN_UADDLL_OVERFLOW:
9321 ifn = IFN_ADD_OVERFLOW;
9322 break;
9323 case BUILT_IN_SUB_OVERFLOW_P:
9324 ovf_only = true;
9325 /* FALLTHRU */
9326 case BUILT_IN_SUB_OVERFLOW:
9327 opcode = MINUS_EXPR;
9328 /* FALLTHRU */
9329 case BUILT_IN_SSUB_OVERFLOW:
9330 case BUILT_IN_SSUBL_OVERFLOW:
9331 case BUILT_IN_SSUBLL_OVERFLOW:
9332 case BUILT_IN_USUB_OVERFLOW:
9333 case BUILT_IN_USUBL_OVERFLOW:
9334 case BUILT_IN_USUBLL_OVERFLOW:
9335 ifn = IFN_SUB_OVERFLOW;
9336 break;
9337 case BUILT_IN_MUL_OVERFLOW_P:
9338 ovf_only = true;
9339 /* FALLTHRU */
9340 case BUILT_IN_MUL_OVERFLOW:
9341 opcode = MULT_EXPR;
9342 /* FALLTHRU */
9343 case BUILT_IN_SMUL_OVERFLOW:
9344 case BUILT_IN_SMULL_OVERFLOW:
9345 case BUILT_IN_SMULLL_OVERFLOW:
9346 case BUILT_IN_UMUL_OVERFLOW:
9347 case BUILT_IN_UMULL_OVERFLOW:
9348 case BUILT_IN_UMULLL_OVERFLOW:
9349 ifn = IFN_MUL_OVERFLOW;
9350 break;
9351 default:
9352 gcc_unreachable ();
9355 /* For the "generic" overloads, the first two arguments can have different
9356 types and the last argument determines the target type to use to check
9357 for overflow. The arguments of the other overloads all have the same
9358 type. */
9359 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9361 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9362 arguments are constant, attempt to fold the built-in call into a constant
9363 expression indicating whether or not it detected an overflow. */
9364 if (ovf_only
9365 && TREE_CODE (arg0) == INTEGER_CST
9366 && TREE_CODE (arg1) == INTEGER_CST)
9367 /* Perform the computation in the target type and check for overflow. */
9368 return omit_one_operand_loc (loc, boolean_type_node,
9369 arith_overflowed_p (opcode, type, arg0, arg1)
9370 ? boolean_true_node : boolean_false_node,
9371 arg2);
9373 tree ctype = build_complex_type (type);
9374 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9375 2, arg0, arg1);
9376 tree tgt = save_expr (call);
9377 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9378 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9379 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9381 if (ovf_only)
9382 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9384 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9385 tree store
9386 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9387 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
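/* Illustrative sketch (not part of GCC): for the non-_p overloads the
   result above is the equivalent of

     _Complex int tmp = .ADD_OVERFLOW (a, b);   // internal function
     *res = __real__ tmp;                       // arithmetic result
     (_Bool) __imag__ tmp;                      // overflow flag, the value
                                                // of the whole expression

   with the real part carrying the result and the imaginary part the
   overflow flag.  For the _p variants only the flag survives and ARG2 is
   evaluated solely for side-effects.  */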
9390 /* Fold a call to __builtin_FILE to a constant string. */
9392 static inline tree
9393 fold_builtin_FILE (location_t loc)
9395 if (const char *fname = LOCATION_FILE (loc))
9397 /* The documentation says this builtin is equivalent to the preprocessor
9398 __FILE__ macro so it appears appropriate to use the same file prefix
9399 mappings. */
9400 fname = remap_macro_filename (fname);
9401 return build_string_literal (strlen (fname) + 1, fname);
9404 return build_string_literal (1, "");
9407 /* Fold a call to __builtin_FUNCTION to a constant string. */
9409 static inline tree
9410 fold_builtin_FUNCTION ()
9412 const char *name = "";
9414 if (current_function_decl)
9415 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9417 return build_string_literal (strlen (name) + 1, name);
9420 /* Fold a call to __builtin_LINE to an integer constant. */
9422 static inline tree
9423 fold_builtin_LINE (location_t loc, tree type)
9425 return build_int_cst (type, LOCATION_LINE (loc));
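/* Usage note (illustrative): these folds let source locations be
   captured without the preprocessor, e.g.

     void log_here (void)
     {
       __builtin_printf ("%s:%d in %s\n",
                         __builtin_FILE (), __builtin_LINE (),
                         __builtin_FUNCTION ());
     }

   where each call collapses to a string literal or integer constant
   during folding.  */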
9428 /* Fold a call to built-in function FNDECL with 0 arguments.
9429 This function returns NULL_TREE if no simplification was possible. */
9431 static tree
9432 fold_builtin_0 (location_t loc, tree fndecl)
9434 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9435 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9436 switch (fcode)
9438 case BUILT_IN_FILE:
9439 return fold_builtin_FILE (loc);
9441 case BUILT_IN_FUNCTION:
9442 return fold_builtin_FUNCTION ();
9444 case BUILT_IN_LINE:
9445 return fold_builtin_LINE (loc, type);
9447 CASE_FLT_FN (BUILT_IN_INF):
9448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9449 case BUILT_IN_INFD32:
9450 case BUILT_IN_INFD64:
9451 case BUILT_IN_INFD128:
9452 return fold_builtin_inf (loc, type, true);
9454 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9455 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9456 return fold_builtin_inf (loc, type, false);
9458 case BUILT_IN_CLASSIFY_TYPE:
9459 return fold_builtin_classify_type (NULL_TREE);
9461 default:
9462 break;
9464 return NULL_TREE;
9467 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9468 This function returns NULL_TREE if no simplification was possible. */
9470 static tree
9471 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9473 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9474 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9476 if (TREE_CODE (arg0) == ERROR_MARK)
9477 return NULL_TREE;
9479 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9480 return ret;
9482 switch (fcode)
9484 case BUILT_IN_CONSTANT_P:
9486 tree val = fold_builtin_constant_p (arg0);
9488 /* Gimplification will pull the CALL_EXPR for the builtin out of
9489 an if condition. When not optimizing, we'll not CSE it back.
9490 To avoid regressions such as link errors, return false now. */
9491 if (!val && !optimize)
9492 val = integer_zero_node;
9494 return val;
9497 case BUILT_IN_CLASSIFY_TYPE:
9498 return fold_builtin_classify_type (arg0);
9500 case BUILT_IN_STRLEN:
9501 return fold_builtin_strlen (loc, type, arg0);
9503 CASE_FLT_FN (BUILT_IN_FABS):
9504 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9505 case BUILT_IN_FABSD32:
9506 case BUILT_IN_FABSD64:
9507 case BUILT_IN_FABSD128:
9508 return fold_builtin_fabs (loc, arg0, type);
9510 case BUILT_IN_ABS:
9511 case BUILT_IN_LABS:
9512 case BUILT_IN_LLABS:
9513 case BUILT_IN_IMAXABS:
9514 return fold_builtin_abs (loc, arg0, type);
9516 CASE_FLT_FN (BUILT_IN_CONJ):
9517 if (validate_arg (arg0, COMPLEX_TYPE)
9518 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9519 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9520 break;
9522 CASE_FLT_FN (BUILT_IN_CREAL):
9523 if (validate_arg (arg0, COMPLEX_TYPE)
9524 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9525 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9526 break;
9528 CASE_FLT_FN (BUILT_IN_CIMAG):
9529 if (validate_arg (arg0, COMPLEX_TYPE)
9530 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9531 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9532 break;
9534 CASE_FLT_FN (BUILT_IN_CARG):
9535 return fold_builtin_carg (loc, arg0, type);
9537 case BUILT_IN_ISASCII:
9538 return fold_builtin_isascii (loc, arg0);
9540 case BUILT_IN_TOASCII:
9541 return fold_builtin_toascii (loc, arg0);
9543 case BUILT_IN_ISDIGIT:
9544 return fold_builtin_isdigit (loc, arg0);
9546 CASE_FLT_FN (BUILT_IN_FINITE):
9547 case BUILT_IN_FINITED32:
9548 case BUILT_IN_FINITED64:
9549 case BUILT_IN_FINITED128:
9550 case BUILT_IN_ISFINITE:
9552 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9553 if (ret)
9554 return ret;
9555 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9558 CASE_FLT_FN (BUILT_IN_ISINF):
9559 case BUILT_IN_ISINFD32:
9560 case BUILT_IN_ISINFD64:
9561 case BUILT_IN_ISINFD128:
9563 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9564 if (ret)
9565 return ret;
9566 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9569 case BUILT_IN_ISNORMAL:
9570 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9572 case BUILT_IN_ISINF_SIGN:
9573 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9575 CASE_FLT_FN (BUILT_IN_ISNAN):
9576 case BUILT_IN_ISNAND32:
9577 case BUILT_IN_ISNAND64:
9578 case BUILT_IN_ISNAND128:
9579 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9581 case BUILT_IN_FREE:
9582 if (integer_zerop (arg0))
9583 return build_empty_stmt (loc);
9584 break;
9586 default:
9587 break;
9590 return NULL_TREE;
9594 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9595 This function returns NULL_TREE if no simplification was possible. */
9597 static tree
9598 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9600 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9601 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9603 if (TREE_CODE (arg0) == ERROR_MARK
9604 || TREE_CODE (arg1) == ERROR_MARK)
9605 return NULL_TREE;
9607 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9608 return ret;
9610 switch (fcode)
9612 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9613 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9614 if (validate_arg (arg0, REAL_TYPE)
9615 && validate_arg (arg1, POINTER_TYPE))
9616 return do_mpfr_lgamma_r (arg0, arg1, type);
9617 break;
9619 CASE_FLT_FN (BUILT_IN_FREXP):
9620 return fold_builtin_frexp (loc, arg0, arg1, type);
9622 CASE_FLT_FN (BUILT_IN_MODF):
9623 return fold_builtin_modf (loc, arg0, arg1, type);
9625 case BUILT_IN_STRSPN:
9626 return fold_builtin_strspn (loc, arg0, arg1);
9628 case BUILT_IN_STRCSPN:
9629 return fold_builtin_strcspn (loc, arg0, arg1);
9631 case BUILT_IN_STRPBRK:
9632 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9634 case BUILT_IN_EXPECT:
9635 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9637 case BUILT_IN_ISGREATER:
9638 return fold_builtin_unordered_cmp (loc, fndecl,
9639 arg0, arg1, UNLE_EXPR, LE_EXPR);
9640 case BUILT_IN_ISGREATEREQUAL:
9641 return fold_builtin_unordered_cmp (loc, fndecl,
9642 arg0, arg1, UNLT_EXPR, LT_EXPR);
9643 case BUILT_IN_ISLESS:
9644 return fold_builtin_unordered_cmp (loc, fndecl,
9645 arg0, arg1, UNGE_EXPR, GE_EXPR);
9646 case BUILT_IN_ISLESSEQUAL:
9647 return fold_builtin_unordered_cmp (loc, fndecl,
9648 arg0, arg1, UNGT_EXPR, GT_EXPR);
9649 case BUILT_IN_ISLESSGREATER:
9650 return fold_builtin_unordered_cmp (loc, fndecl,
9651 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9652 case BUILT_IN_ISUNORDERED:
9653 return fold_builtin_unordered_cmp (loc, fndecl,
9654 arg0, arg1, UNORDERED_EXPR,
9655 NOP_EXPR);
9657 /* We do the folding for va_start in the expander. */
9658 case BUILT_IN_VA_START:
9659 break;
9661 case BUILT_IN_OBJECT_SIZE:
9662 return fold_builtin_object_size (arg0, arg1);
9664 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9665 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9667 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9668 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9670 default:
9671 break;
9673 return NULL_TREE;
9676 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9677 and ARG2.
9678 This function returns NULL_TREE if no simplification was possible. */
9680 static tree
9681 fold_builtin_3 (location_t loc, tree fndecl,
9682 tree arg0, tree arg1, tree arg2)
9684 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9685 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9687 if (TREE_CODE (arg0) == ERROR_MARK
9688 || TREE_CODE (arg1) == ERROR_MARK
9689 || TREE_CODE (arg2) == ERROR_MARK)
9690 return NULL_TREE;
9692 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9693 arg0, arg1, arg2))
9694 return ret;
9696 switch (fcode)
9699 CASE_FLT_FN (BUILT_IN_SINCOS):
9700 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9702 CASE_FLT_FN (BUILT_IN_REMQUO):
9703 if (validate_arg (arg0, REAL_TYPE)
9704 && validate_arg (arg1, REAL_TYPE)
9705 && validate_arg (arg2, POINTER_TYPE))
9706 return do_mpfr_remquo (arg0, arg1, arg2);
9707 break;
9709 case BUILT_IN_MEMCMP:
9710 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9712 case BUILT_IN_EXPECT:
9713 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9715 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9716 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9718 case BUILT_IN_ADD_OVERFLOW:
9719 case BUILT_IN_SUB_OVERFLOW:
9720 case BUILT_IN_MUL_OVERFLOW:
9721 case BUILT_IN_ADD_OVERFLOW_P:
9722 case BUILT_IN_SUB_OVERFLOW_P:
9723 case BUILT_IN_MUL_OVERFLOW_P:
9724 case BUILT_IN_SADD_OVERFLOW:
9725 case BUILT_IN_SADDL_OVERFLOW:
9726 case BUILT_IN_SADDLL_OVERFLOW:
9727 case BUILT_IN_SSUB_OVERFLOW:
9728 case BUILT_IN_SSUBL_OVERFLOW:
9729 case BUILT_IN_SSUBLL_OVERFLOW:
9730 case BUILT_IN_SMUL_OVERFLOW:
9731 case BUILT_IN_SMULL_OVERFLOW:
9732 case BUILT_IN_SMULLL_OVERFLOW:
9733 case BUILT_IN_UADD_OVERFLOW:
9734 case BUILT_IN_UADDL_OVERFLOW:
9735 case BUILT_IN_UADDLL_OVERFLOW:
9736 case BUILT_IN_USUB_OVERFLOW:
9737 case BUILT_IN_USUBL_OVERFLOW:
9738 case BUILT_IN_USUBLL_OVERFLOW:
9739 case BUILT_IN_UMUL_OVERFLOW:
9740 case BUILT_IN_UMULL_OVERFLOW:
9741 case BUILT_IN_UMULLL_OVERFLOW:
9742 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9744 default:
9745 break;
9747 return NULL_TREE;
9750 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9751 arguments. IGNORE is true if the result of the
9752 function call is ignored. This function returns NULL_TREE if no
9753 simplification was possible. */
9755 tree
9756 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9758 tree ret = NULL_TREE;
9760 switch (nargs)
9762 case 0:
9763 ret = fold_builtin_0 (loc, fndecl);
9764 break;
9765 case 1:
9766 ret = fold_builtin_1 (loc, fndecl, args[0]);
9767 break;
9768 case 2:
9769 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9770 break;
9771 case 3:
9772 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9773 break;
9774 default:
9775 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9776 break;
9778 if (ret)
9780 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9781 SET_EXPR_LOCATION (ret, loc);
9782 return ret;
9784 return NULL_TREE;
9787 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9788 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9789 of arguments in ARGS to be omitted. OLDNARGS is the number of
9790 elements in ARGS. */
9792 static tree
9793 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9794 int skip, tree fndecl, int n, va_list newargs)
9796 int nargs = oldnargs - skip + n;
9797 tree *buffer;
9799 if (n > 0)
9801 int i, j;
9803 buffer = XALLOCAVEC (tree, nargs);
9804 for (i = 0; i < n; i++)
9805 buffer[i] = va_arg (newargs, tree);
9806 for (j = skip; j < oldnargs; j++, i++)
9807 buffer[i] = args[j];
9809 else
9810 buffer = args + skip;
9812 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9815 /* Return true if FNDECL shouldn't be folded right now.
9816 If a built-in function has an always_inline inline wrapper, defer
9817 folding it until after always_inline functions have been inlined;
9818 otherwise e.g. -D_FORTIFY_SOURCE checking might not be
9819 performed. */
9821 bool
9822 avoid_folding_inline_builtin (tree fndecl)
9824 return (DECL_DECLARED_INLINE_P (fndecl)
9825 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9826 && cfun
9827 && !cfun->always_inline_functions_inlined
9828 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9831 /* A wrapper function for builtin folding that prevents warnings for
9832 "statement without effect" and the like, caused by removing the
9833 call node earlier than the warning is generated. */
9835 tree
9836 fold_call_expr (location_t loc, tree exp, bool ignore)
9838 tree ret = NULL_TREE;
9839 tree fndecl = get_callee_fndecl (exp);
9840 if (fndecl && fndecl_built_in_p (fndecl)
9841 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9842 yet. Defer folding until we see all the arguments
9843 (after inlining). */
9844 && !CALL_EXPR_VA_ARG_PACK (exp))
9846 int nargs = call_expr_nargs (exp);
9848 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9849 instead last argument is __builtin_va_arg_pack (). Defer folding
9850 even in that case, until arguments are finalized. */
9851 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9853 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9854 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9855 return NULL_TREE;
9858 if (avoid_folding_inline_builtin (fndecl))
9859 return NULL_TREE;
9861 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9862 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9863 CALL_EXPR_ARGP (exp), ignore);
9864 else
9866 tree *args = CALL_EXPR_ARGP (exp);
9867 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9868 if (ret)
9869 return ret;
9872 return NULL_TREE;
9875 /* Fold a CALL_EXPR with type TYPE, with FN as the function expression.
9876 N arguments are passed in the array ARGARRAY. Return a folded
9877 expression or NULL_TREE if no simplification was possible. */
9879 tree
9880 fold_builtin_call_array (location_t loc, tree,
9881 tree fn,
9882 int n,
9883 tree *argarray)
9885 if (TREE_CODE (fn) != ADDR_EXPR)
9886 return NULL_TREE;
9888 tree fndecl = TREE_OPERAND (fn, 0);
9889 if (TREE_CODE (fndecl) == FUNCTION_DECL
9890 && fndecl_built_in_p (fndecl))
9892 /* If last argument is __builtin_va_arg_pack (), arguments to this
9893 function are not finalized yet. Defer folding until they are. */
9894 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9896 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9897 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9898 return NULL_TREE;
9900 if (avoid_folding_inline_builtin (fndecl))
9901 return NULL_TREE;
9902 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9903 return targetm.fold_builtin (fndecl, n, argarray, false);
9904 else
9905 return fold_builtin_n (loc, fndecl, argarray, n, false);
9908 return NULL_TREE;
9911 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9912 along with N new arguments specified as the "..." parameters. SKIP
9913 is the number of arguments in EXP to be omitted. This function is used
9914 to do varargs-to-varargs transformations. */
9916 static tree
9917 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9919 va_list ap;
9920 tree t;
9922 va_start (ap, n);
9923 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9924 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9925 va_end (ap);
9927 return t;
9930 /* Validate a single argument ARG against a tree code CODE representing
9931 a type. Return true when argument is valid. */
9933 static bool
9934 validate_arg (const_tree arg, enum tree_code code)
9936 if (!arg)
9937 return false;
9938 else if (code == POINTER_TYPE)
9939 return POINTER_TYPE_P (TREE_TYPE (arg));
9940 else if (code == INTEGER_TYPE)
9941 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9942 return code == TREE_CODE (TREE_TYPE (arg));
9945 /* This function validates the types of a function call argument list
9946 against a specified list of tree_codes. If the last specifier is a 0,
9947 that represents an ellipsis, otherwise the last specifier must be a
9948 VOID_TYPE.
9950 This is the GIMPLE version of validate_arglist. Eventually we want to
9951 completely convert builtins.c to work from GIMPLEs and the tree based
9952 validate_arglist will then be removed. */
9954 bool
9955 validate_gimple_arglist (const gcall *call, ...)
9957 enum tree_code code;
9958 bool res = 0;
9959 va_list ap;
9960 const_tree arg;
9961 size_t i;
9963 va_start (ap, call);
9964 i = 0;
9968 code = (enum tree_code) va_arg (ap, int);
9969 switch (code)
9971 case 0:
9972 /* This signifies an ellipsis; any further arguments are all ok. */
9973 res = true;
9974 goto end;
9975 case VOID_TYPE:
9976 /* This signifies an endlink, if no arguments remain, return
9977 true, otherwise return false. */
9978 res = (i == gimple_call_num_args (call));
9979 goto end;
9980 default:
9981 /* If no parameters remain or the parameter's code does not
9982 match the specified code, return false. Otherwise continue
9983 checking any remaining arguments. */
9984 arg = gimple_call_arg (call, i++);
9985 if (!validate_arg (arg, code))
9986 goto end;
9987 break;
9990 while (1);
9992 /* We need gotos here since we can only have one VA_CLOSE in a
9993 function. */
9994 end: ;
9995 va_end (ap);
9997 return res;
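/* Usage sketch (illustrative, not from GCC): callers check a gimple
   call's signature against a 0- or VOID_TYPE-terminated list, e.g.

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return false;   // expect exactly (real, pointer)

   Terminating with 0 instead of VOID_TYPE would accept any number of
   further arguments.  */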
10000 /* Default target-specific builtin expander that does nothing. */
10002 rtx
10003 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10004 rtx target ATTRIBUTE_UNUSED,
10005 rtx subtarget ATTRIBUTE_UNUSED,
10006 machine_mode mode ATTRIBUTE_UNUSED,
10007 int ignore ATTRIBUTE_UNUSED)
10009 return NULL_RTX;
10012 /* Returns true if EXP represents data that would potentially reside
10013 in a readonly section. */
10015 bool
10016 readonly_data_expr (tree exp)
10018 STRIP_NOPS (exp);
10020 if (TREE_CODE (exp) != ADDR_EXPR)
10021 return false;
10023 exp = get_base_address (TREE_OPERAND (exp, 0));
10024 if (!exp)
10025 return false;
10027 /* Make sure we call decl_readonly_section only for trees it
10028 can handle (since it returns true for everything it doesn't
10029 understand). */
10030 if (TREE_CODE (exp) == STRING_CST
10031 || TREE_CODE (exp) == CONSTRUCTOR
10032 || (VAR_P (exp) && TREE_STATIC (exp)))
10033 return decl_readonly_section (exp, 0);
10034 else
10035 return false;
10038 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10039 to the call, and TYPE is its return type.
10041 Return NULL_TREE if no simplification was possible, otherwise return the
10042 simplified form of the call as a tree.
10044 The simplified form may be a constant or other expression which
10045 computes the same value, but in a more efficient manner (including
10046 calls to other builtin functions).
10048 The call may contain arguments which need to be evaluated, but
10049 which are not useful to determine the result of the call. In
10050 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10051 COMPOUND_EXPR will be an argument which must be evaluated.
10052 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10053 COMPOUND_EXPR in the chain will contain the tree for the simplified
10054 form of the builtin function call. */
10056 static tree
10057 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10059 if (!validate_arg (s1, POINTER_TYPE)
10060 || !validate_arg (s2, POINTER_TYPE))
10061 return NULL_TREE;
10062 else
10064 tree fn;
10065 const char *p1, *p2;
10067 p2 = c_getstr (s2);
10068 if (p2 == NULL)
10069 return NULL_TREE;
10071 p1 = c_getstr (s1);
10072 if (p1 != NULL)
10074 const char *r = strpbrk (p1, p2);
10075 tree tem;
10077 if (r == NULL)
10078 return build_int_cst (TREE_TYPE (s1), 0);
10080 /* Return an offset into the constant string argument. */
10081 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10082 return fold_convert_loc (loc, type, tem);
10085 if (p2[0] == '\0')
10086 /* strpbrk(x, "") == NULL.
10087 Evaluate and ignore s1 in case it had side-effects. */
10088 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10090 if (p2[1] != '\0')
10091 return NULL_TREE; /* Really call strpbrk. */
10093 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10094 if (!fn)
10095 return NULL_TREE;
10097 /* New argument list transforming strpbrk(s1, s2) to
10098 strchr(s1, s2[0]). */
10099 return build_call_expr_loc (loc, fn, 2, s1,
10100 build_int_cst (integer_type_node, p2[0]));
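/* Worked examples (illustrative): with constant arguments the fold
   above yields

     strpbrk ("hello", "lo")  ->  "hello" + 2     // first 'l'
     strpbrk (s, "")          ->  NULL            // s still evaluated
     strpbrk (s, "x")         ->  strchr (s, 'x')

   Only a single-character constant set is rewritten to strchr; longer
   sets still call strpbrk.  */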
10104 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10105 to the call.
10107 Return NULL_TREE if no simplification was possible, otherwise return the
10108 simplified form of the call as a tree.
10110 The simplified form may be a constant or other expression which
10111 computes the same value, but in a more efficient manner (including
10112 calls to other builtin functions).
10114 The call may contain arguments which need to be evaluated, but
10115 which are not useful to determine the result of the call. In
10116 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10117 COMPOUND_EXPR will be an argument which must be evaluated.
10118 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10119 COMPOUND_EXPR in the chain will contain the tree for the simplified
10120 form of the builtin function call. */
10122 static tree
10123 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10125 if (!validate_arg (s1, POINTER_TYPE)
10126 || !validate_arg (s2, POINTER_TYPE))
10127 return NULL_TREE;
10128 else
10130 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10132 /* If either argument is "", return NULL_TREE. */
10133 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10134 /* Evaluate and ignore both arguments in case either one has
10135 side-effects. */
10136 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10137 s1, s2);
10138 return NULL_TREE;
10142 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10143 to the call.
10145 Return NULL_TREE if no simplification was possible, otherwise return the
10146 simplified form of the call as a tree.
10148 The simplified form may be a constant or other expression which
10149 computes the same value, but in a more efficient manner (including
10150 calls to other builtin functions).
10152 The call may contain arguments which need to be evaluated, but
10153 which are not useful to determine the result of the call. In
10154 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10155 COMPOUND_EXPR will be an argument which must be evaluated.
10156 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10157 COMPOUND_EXPR in the chain will contain the tree for the simplified
10158 form of the builtin function call. */
10160 static tree
10161 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10163 if (!validate_arg (s1, POINTER_TYPE)
10164 || !validate_arg (s2, POINTER_TYPE))
10165 return NULL_TREE;
10166 else
10168 /* If the first argument is "", return NULL_TREE. */
10169 const char *p1 = c_getstr (s1);
10170 if (p1 && *p1 == '\0')
10172 /* Evaluate and ignore argument s2 in case it has
10173 side-effects. */
10174 return omit_one_operand_loc (loc, size_type_node,
10175 size_zero_node, s2);
10178 /* If the second argument is "", return __builtin_strlen(s1). */
10179 const char *p2 = c_getstr (s2);
10180 if (p2 && *p2 == '\0')
10182 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10184 /* If the replacement _DECL isn't initialized, don't do the
10185 transformation. */
10186 if (!fn)
10187 return NULL_TREE;
10189 return build_call_expr_loc (loc, fn, 1, s1);
10191 return NULL_TREE;
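/* Worked examples (illustrative): the two folds above give

     strspn ("", s)   ->  0            // both arguments still evaluated
     strspn (s, "")   ->  0
     strcspn ("", s)  ->  0            // s still evaluated
     strcspn (s, "")  ->  strlen (s)   // empty reject set spans the string

   matching the C library definitions of the span functions.  */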
10195 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10196 produced, false otherwise. This is done so that we don't output the
10197 error or warning two or three times. */
10199 bool
10200 fold_builtin_next_arg (tree exp, bool va_start_p)
10202 tree fntype = TREE_TYPE (current_function_decl);
10203 int nargs = call_expr_nargs (exp);
10204 tree arg;
10205 /* There is a good chance the current input_location points inside the
10206 definition of the va_start macro (perhaps on the token for
10207 builtin) in a system header, so warnings will not be emitted.
10208 Use the location in real source code. */
10209 location_t current_location =
10210 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10211 NULL);
10213 if (!stdarg_p (fntype))
10215 error ("%<va_start%> used in function with fixed args");
10216 return true;
10219 if (va_start_p)
10221 if (va_start_p && (nargs != 2))
10223 error ("wrong number of arguments to function %<va_start%>");
10224 return true;
10226 arg = CALL_EXPR_ARG (exp, 1);
10228 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10229 when we checked the arguments and if needed issued a warning. */
10230 else
10232 if (nargs == 0)
10234 /* Evidently an out of date version of <stdarg.h>; can't validate
10235 va_start's second argument, but can still work as intended. */
10236 warning_at (current_location,
10237 OPT_Wvarargs,
10238 "%<__builtin_next_arg%> called without an argument");
10239 return true;
10241 else if (nargs > 1)
10243 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10244 return true;
10246 arg = CALL_EXPR_ARG (exp, 0);
10249 if (TREE_CODE (arg) == SSA_NAME)
10250 arg = SSA_NAME_VAR (arg);
10252 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10253 or __builtin_next_arg (0) the first time we see it, after checking
10254 the arguments and if needed issuing a warning. */
10255 if (!integer_zerop (arg))
10257 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10259 /* Strip off all nops for the sake of the comparison. This
10260 is not quite the same as STRIP_NOPS. It does more.
10261 We must also strip off INDIRECT_EXPR for C++ reference
10262 parameters. */
10263 while (CONVERT_EXPR_P (arg)
10264 || TREE_CODE (arg) == INDIRECT_REF)
10265 arg = TREE_OPERAND (arg, 0);
10266 if (arg != last_parm)
10268 /* FIXME: Sometimes the tree optimizers hand us something other than
10269 the last argument even though the user did use the last
10270 argument. We just warn and continue as if it were the last
10271 argument, so that we do not generate wrong code because of
10272 it. */
10273 warning_at (current_location,
10274 OPT_Wvarargs,
10275 "second parameter of %<va_start%> not last named argument");
10278 /* Undefined by C99 7.15.1.4p4 (va_start):
10279 "If the parameter parmN is declared with the register storage
10280 class, with a function or array type, or with a type that is
10281 not compatible with the type that results after application of
10282 the default argument promotions, the behavior is undefined."
10284 else if (DECL_REGISTER (arg))
10286 warning_at (current_location,
10287 OPT_Wvarargs,
10288 "undefined behavior when second parameter of "
10289 "%<va_start%> is declared with %<register%> storage");
10292 /* We want to verify the second parameter just once before the tree
10293 optimizers are run and then avoid keeping it in the tree,
10294 as otherwise we could warn even for correct code like:
10295 void foo (int i, ...)
10296 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10297 if (va_start_p)
10298 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10299 else
10300 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10302 return false;
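/* Usage note (illustrative): the checks above diagnose e.g.

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter not last named one
     }

   and then rewrite the surviving call to __builtin_va_start (ap, 0) so
   the parameter is not re-checked by later passes.  */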
10306 /* Expand a call EXP to __builtin_object_size. */
10308 static rtx
10309 expand_builtin_object_size (tree exp)
10311 tree ost;
10312 int object_size_type;
10313 tree fndecl = get_callee_fndecl (exp);
10315 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10317 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10318 exp, fndecl);
10319 expand_builtin_trap ();
10320 return const0_rtx;
10323 ost = CALL_EXPR_ARG (exp, 1);
10324 STRIP_NOPS (ost);
10326 if (TREE_CODE (ost) != INTEGER_CST
10327 || tree_int_cst_sgn (ost) < 0
10328 || compare_tree_int (ost, 3) > 0)
10330 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10331 exp, fndecl);
10332 expand_builtin_trap ();
10333 return const0_rtx;
10336 object_size_type = tree_to_shwi (ost);
10338 return object_size_type < 2 ? constm1_rtx : const0_rtx;
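/* Worked example (illustrative): when nothing is known about the
   pointed-to object, the expansion above yields the documented failure
   values:

     extern char *p;   // object unknown at compile time
     __builtin_object_size (p, 0)  ->  (size_t) -1   // types 0 and 1
     __builtin_object_size (p, 2)  ->  (size_t) 0    // types 2 and 3

   i.e. constm1_rtx for the maximum-style queries and const0_rtx for the
   minimum-style ones.  */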
10341 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10342 FCODE is the BUILT_IN_* to use.
10343 Return NULL_RTX if we failed; the caller should emit a normal call,
10344 otherwise try to get the result in TARGET, if convenient (and in
10345 mode MODE if that's convenient). */
10347 static rtx
10348 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10349 enum built_in_function fcode)
10351 if (!validate_arglist (exp,
10352 POINTER_TYPE,
10353 fcode == BUILT_IN_MEMSET_CHK
10354 ? INTEGER_TYPE : POINTER_TYPE,
10355 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10356 return NULL_RTX;
10358 tree dest = CALL_EXPR_ARG (exp, 0);
10359 tree src = CALL_EXPR_ARG (exp, 1);
10360 tree len = CALL_EXPR_ARG (exp, 2);
10361 tree size = CALL_EXPR_ARG (exp, 3);
10363 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10364 /*str=*/NULL_TREE, size);
10366 if (!tree_fits_uhwi_p (size))
10367 return NULL_RTX;
10369 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10371 /* Avoid transforming the checking call to an ordinary one when
10372 an overflow has been detected or when the call couldn't be
10373 validated because the size is not constant. */
10374 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10375 return NULL_RTX;
10377 tree fn = NULL_TREE;
10378 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10379 mem{cpy,pcpy,move,set} is available. */
10380 switch (fcode)
10382 case BUILT_IN_MEMCPY_CHK:
10383 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10384 break;
10385 case BUILT_IN_MEMPCPY_CHK:
10386 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10387 break;
10388 case BUILT_IN_MEMMOVE_CHK:
10389 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10390 break;
10391 case BUILT_IN_MEMSET_CHK:
10392 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10393 break;
10394 default:
10395 break;
10398 if (! fn)
10399 return NULL_RTX;
10401 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10402 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10403 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10404 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10406 else if (fcode == BUILT_IN_MEMSET_CHK)
10407 return NULL_RTX;
10408 else
10410 unsigned int dest_align = get_pointer_alignment (dest);
10412 /* If DEST is not a pointer type, call the normal function. */
10413 if (dest_align == 0)
10414 return NULL_RTX;
10416 /* If SRC and DEST are the same (and not volatile), do nothing. */
10417 if (operand_equal_p (src, dest, 0))
10419 tree expr;
10421 if (fcode != BUILT_IN_MEMPCPY_CHK)
10423 /* Evaluate and ignore LEN in case it has side-effects. */
10424 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10425 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10428 expr = fold_build_pointer_plus (dest, len);
10429 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10432 /* __memmove_chk special case. */
10433 if (fcode == BUILT_IN_MEMMOVE_CHK)
10435 unsigned int src_align = get_pointer_alignment (src);
10437 if (src_align == 0)
10438 return NULL_RTX;
10440 /* If src is categorized for a readonly section we can use
10441 normal __memcpy_chk. */
10442 if (readonly_data_expr (src))
10444 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10445 if (!fn)
10446 return NULL_RTX;
10447 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10448 dest, src, len, size);
10449 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10450 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10451 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10454 return NULL_RTX;
10458 /* Emit warning if a buffer overflow is detected at compile time. */
10460 static void
10461 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10463 /* The source string. */
10464 tree srcstr = NULL_TREE;
10465 /* The size of the destination object. */
10466 tree objsize = NULL_TREE;
10467 /* The string that is being concatenated with (as in __strcat_chk)
10468 or null if it isn't. */
10469 tree catstr = NULL_TREE;
10470 /* The maximum length of the source sequence in a bounded operation
10471 (such as __strncat_chk) or null if the operation isn't bounded
10472 (such as __strcat_chk). */
10473 tree maxread = NULL_TREE;
10474 /* The exact size of the access (such as in __strncpy_chk). */
10475 tree size = NULL_TREE;
10477 switch (fcode)
10479 case BUILT_IN_STRCPY_CHK:
10480 case BUILT_IN_STPCPY_CHK:
10481 srcstr = CALL_EXPR_ARG (exp, 1);
10482 objsize = CALL_EXPR_ARG (exp, 2);
10483 break;
10485 case BUILT_IN_STRCAT_CHK:
10486 /* For __strcat_chk the warning will be emitted only if overflowing
10487 by at least strlen (dest) + 1 bytes. */
10488 catstr = CALL_EXPR_ARG (exp, 0);
10489 srcstr = CALL_EXPR_ARG (exp, 1);
10490 objsize = CALL_EXPR_ARG (exp, 2);
10491 break;
10493 case BUILT_IN_STRNCAT_CHK:
10494 catstr = CALL_EXPR_ARG (exp, 0);
10495 srcstr = CALL_EXPR_ARG (exp, 1);
10496 maxread = CALL_EXPR_ARG (exp, 2);
10497 objsize = CALL_EXPR_ARG (exp, 3);
10498 break;
10500 case BUILT_IN_STRNCPY_CHK:
10501 case BUILT_IN_STPNCPY_CHK:
10502 srcstr = CALL_EXPR_ARG (exp, 1);
10503 size = CALL_EXPR_ARG (exp, 2);
10504 objsize = CALL_EXPR_ARG (exp, 3);
10505 break;
10507 case BUILT_IN_SNPRINTF_CHK:
10508 case BUILT_IN_VSNPRINTF_CHK:
10509 maxread = CALL_EXPR_ARG (exp, 1);
10510 objsize = CALL_EXPR_ARG (exp, 3);
10511 break;
10512 default:
10513 gcc_unreachable ();
10516 if (catstr && maxread)
10518 /* Check __strncat_chk. There is no way to determine the length
10519 of the string to which the source string is being appended so
10520 just warn when the length of the source string is not known. */
10521 check_strncat_sizes (exp, objsize);
10522 return;
10525 /* The destination argument is the first one for all built-ins above. */
10526 tree dst = CALL_EXPR_ARG (exp, 0);
10528 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10531 /* Emit warning if a buffer overflow is detected at compile time
10532 in __sprintf_chk/__vsprintf_chk calls. */
10534 static void
10535 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10537 tree size, len, fmt;
10538 const char *fmt_str;
10539 int nargs = call_expr_nargs (exp);
10541 /* Verify the required arguments in the original call. */
10543 if (nargs < 4)
10544 return;
10545 size = CALL_EXPR_ARG (exp, 2);
10546 fmt = CALL_EXPR_ARG (exp, 3);
10548 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10549 return;
10551 /* Check whether the format is a literal string constant. */
10552 fmt_str = c_getstr (fmt);
10553 if (fmt_str == NULL)
10554 return;
10556 if (!init_target_chars ())
10557 return;
10559 /* If the format doesn't contain % args or %%, we know its size. */
10560 if (strchr (fmt_str, target_percent) == 0)
10561 len = build_int_cstu (size_type_node, strlen (fmt_str));
10562 /* If the format is "%s" and first ... argument is a string literal,
10563 we know it too. */
10564 else if (fcode == BUILT_IN_SPRINTF_CHK
10565 && strcmp (fmt_str, target_percent_s) == 0)
10567 tree arg;
10569 if (nargs < 5)
10570 return;
10571 arg = CALL_EXPR_ARG (exp, 4);
10572 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10573 return;
10575 len = c_strlen (arg, 1);
10576 if (!len || ! tree_fits_uhwi_p (len))
10577 return;
10579 else
10580 return;
10582 /* Add one for the terminating nul. */
10583 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10585 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10586 /*maxread=*/NULL_TREE, len, size);
10589 /* Emit a warning if free is called with the address of a variable. */
10591 static void
10592 maybe_emit_free_warning (tree exp)
10594 tree arg = CALL_EXPR_ARG (exp, 0);
10596 STRIP_NOPS (arg);
10597 if (TREE_CODE (arg) != ADDR_EXPR)
10598 return;
10600 arg = get_base_address (TREE_OPERAND (arg, 0));
10601 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10602 return;
10604 if (SSA_VAR_P (arg))
10605 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10606 "%Kattempt to free a non-heap object %qD", exp, arg);
10607 else
10608 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10609 "%Kattempt to free a non-heap object", exp);
10612 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10613 if possible. */
10615 static tree
10616 fold_builtin_object_size (tree ptr, tree ost)
10618 unsigned HOST_WIDE_INT bytes;
10619 int object_size_type;
10621 if (!validate_arg (ptr, POINTER_TYPE)
10622 || !validate_arg (ost, INTEGER_TYPE))
10623 return NULL_TREE;
10625 STRIP_NOPS (ost);
10627 if (TREE_CODE (ost) != INTEGER_CST
10628 || tree_int_cst_sgn (ost) < 0
10629 || compare_tree_int (ost, 3) > 0)
10630 return NULL_TREE;
10632 object_size_type = tree_to_shwi (ost);
10634 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10635 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10636 and (size_t) 0 for types 2 and 3. */
10637 if (TREE_SIDE_EFFECTS (ptr))
10638 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10640 if (TREE_CODE (ptr) == ADDR_EXPR)
10642 compute_builtin_object_size (ptr, object_size_type, &bytes);
10643 if (wi::fits_to_tree_p (bytes, size_type_node))
10644 return build_int_cstu (size_type_node, bytes);
10646 else if (TREE_CODE (ptr) == SSA_NAME)
10648 /* If object size is not known yet, delay folding until
10649 later. Maybe subsequent passes will help determining
10650 it. */
10651 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10652 && wi::fits_to_tree_p (bytes, size_type_node))
10653 return build_int_cstu (size_type_node, bytes);
10656 return NULL_TREE;
10659 /* Builtins with folding operations that operate on "..." arguments
10660 need special handling; we need to store the arguments in a convenient
10661 data structure before attempting any folding. Fortunately there are
10662 only a few builtins that fall into this category. FNDECL is the
10663 function, ARGS the array of its arguments and NARGS their number. */
10665 static tree
10666 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10668 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10669 tree ret = NULL_TREE;
10671 switch (fcode)
10673 case BUILT_IN_FPCLASSIFY:
10674 ret = fold_builtin_fpclassify (loc, args, nargs);
10675 break;
10677 default:
10678 break;
10680 if (ret)
10682 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10683 SET_EXPR_LOCATION (ret, loc);
10684 TREE_NO_WARNING (ret) = 1;
10685 return ret;
10687 return NULL_TREE;
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

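/* Illustrative note: after initialization, target_percent_s holds
   "%s" and target_percent_s_newline holds "%s\n" in the *target*
   character set, which is what lets format-string folding such as
   printf ("%s\n", s) -> puts (s) work even in a cross compiler
   whose host charset differs from the target's.  */
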
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

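/* Usage sketch (illustrative only; mpfr_sin stands in for whatever
   MPFR function a caller evaluates):

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   i.e. the flags are cleared immediately before the MPFR call, so
   any overflow/underflow flag observed above is known to come from
   that computation.  */
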
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the value pointed to by ARG_QUO and return the result.  The type is
   taken from the type of ARG0 and is used for setting the precision
   of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long, so it may
		 return more bits in quo than the target int can hold
		 if sizeof (host long) > sizeof (target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, reduce the quo value modulo the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}

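/* Illustrative example (hypothetical user code): with constant
   arguments

     int q;
     double r = __builtin_remquo (7.0, 2.0, &q);

   the code above folds the call into a COMPOUND_EXPR assigning
   q = 4 and yielding r = -1.0: 7/2 = 3.5 rounds to the nearest
   (even) integer 4, so the remainder is 7 - 4*2 = -1.  */
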
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}

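/* Illustrative example (hypothetical user code): folding

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);

   produces sg = 1 and l = log (sqrt (pi)), about 0.5723649, since
   gamma (0.5) = sqrt (pi) is positive.  */
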
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }
  return result;
}

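/* Illustrative example (hypothetical user code): GCC's constant
   folding passes mpc_pow as FUNC here when folding cpow, so a call
   such as

     _Complex double z = __builtin_cpow (1.0i, 2.0 + 0.0i);

   can fold to the constant -1.0 + 0.0i (i squared) at compile time,
   subject to the finiteness checks above.  */
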
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

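/* Illustrative example (hypothetical user code): a redeclaration
   such as

     extern int ffs (int) __asm__ ("my_ffs");

   ends up here, and the BUILT_IN_FFS special case makes the ffs
   optab's library fallback use "my_ffs" as well, keeping the inline
   expansion and out-of-line paths consistent.  */
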
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}

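/* Illustrative example: string folders use this to extract the
   character argument of calls like strchr (s, 'a'); on the usual
   hosts and targets with 8-bit chars it stores 'a' in *P and returns
   true, and it refuses to fold when the target's CHAR_TYPE_SIZE
   differs from the host's char width.  */
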
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}

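/* Illustrative note: TYPE_MAX_VALUE (ptrdiff_type_node) is
   PTRDIFF_MAX, e.g. 2^63 - 1 on an LP64 target, so the access
   checking above treats any size greater than that as an invalid or
   unknown object size.  */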