/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
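/* An illustrative instance of the M/N contract above (added commentary,
   not from the original sources): for an access P->F where the pointer
   P is known to be 8-byte aligned and the field F sits at byte offset 2
   within *P, the function computes *ALIGNP == 64 and *BITPOSP == 16,
   i.e. the address of P->F is 16 bits past a 64-bit-aligned boundary.  */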
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    {
	      align = BITS_PER_UNIT;
	      known_alignment = false;
	    }
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */
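/* A hedged example of the contract (added commentary, not from the
   original sources): for an SSA pointer P whose ptr_info records
   16-byte alignment with no misalignment, the expression P + 6 yields
   *ALIGNP == 128 and *BITPOSP == 48; the pointer is known to sit
   6 bytes past a 16-byte boundary.  */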
bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
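/* Illustrative cases (added commentary, not from the original sources):

     string_length ("ab\0cd", 1, 5) == 2   -- stops at the first NUL
     string_length ("abcd", 1, 4) == 4     -- no NUL among MAXELTS elements

   For ELTSIZE == 2 a NUL element is two zero bytes, so for the byte
   sequence 'a', 0, 'b', 0, 0, 0 the result is 2.  */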
unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */
void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */
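/* A hedged sketch of the intended use (added commentary): for

     const char a[4] = "abcd";	 -- no room for a terminating NUL

   unterminated_array (build_fold_addr_expr (a_decl), &size, &exact)
   would return the declaration of A, with *SIZE set to 4 and *EXACT
   set to true; A_DECL here is a hypothetical VAR_DECL for A.  */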
tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (exp, 1, &data);
  if (len == NULL_TREE && data.len && data.decl)
    {
      if (size)
	{
	  len = data.len;
	  if (data.off)
	    {
	      /* Constant offsets are already accounted for in data.len, but
		 not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (data.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (data.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (data.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (data.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return data.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   from what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
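/* A few hedged examples of the results (added commentary, not from the
   original sources), writing the source-level string for the tree SRC:

     SRC = "hello"	     => ssize_int (5)
     SRC = "foo\0bar" + i    => NULL_TREE; the runtime offset I into a
			     string with an embedded NUL is not handled
     SRC = L"ab", ELTSIZE 4  => ssize_int (2) when wchar_t is 4 bytes  */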
tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data;
  memset (&local_strlen_data, 0, sizeof (c_strlen_data));
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->len = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (len), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->len = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
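/* For example (added commentary; behavior follows the endianness logic
   below): reading "abcd" in SImode yields the constant 0x64636261 on a
   little-endian target and 0x61626364 on a big-endian one, matching the
   value the target itself would load from those four bytes.  */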
static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */
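/* A worked example (added commentary): with 8-bit target chars, an
   INTEGER_CST of 0x141 is first truncated to the target char value 0x41
   ('A'), which also fits in a host char, so *P is set to 'A' and 0 is
   returned.  A nonzero return is only possible when CHAR_TYPE_SIZE
   exceeds HOST_BITS_PER_CHAR and the truncated value does not fit.  */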
static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
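/* Buffer layout used below (summary added for clarity): word 0 of the
   buffer receives the frame pointer value, word 1 the address of
   RECEIVER_LABEL, and everything from offset 2 * GET_MODE_SIZE (Pmode)
   on is the machine-dependent stack save area.  */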
void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if more arguments remain in the call-expression argument
   iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
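/* A typical use, taken from expand_builtin_nonlocal_goto below:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   which requires exactly two pointer arguments and, for arguments
   declared nonnull, rejects literal zeros.  */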
static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
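/* For reference (added commentary): at the source level the builtin is
   written as, e.g.,

     __builtin_prefetch (p, 1, 3);   -- prefetch *p for writing,
				     -- high temporal locality

   where the second and third arguments are optional and default to 0
   (read) and 3 (high locality), as handled below.  */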
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
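/* Block layout computed here (summary added for clarity): the incoming
   arg-pointer comes first, then the structure value address unless it
   is passed as an "invisible" first argument, then one slot for every
   register that can carry an argument, each slot aligned to its mode's
   natural alignment.  */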
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1702 /* Perform an untyped call and save the state required to perform an
1703 untyped return of whatever value was returned by the given function. */
1705 static rtx
1706 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1708 int size, align, regno;
1709 fixed_size_mode mode;
1710 rtx incoming_args, result, reg, dest, src;
1711 rtx_call_insn *call_insn;
1712 rtx old_stack_level = 0;
1713 rtx call_fusage = 0;
1714 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1716 arguments = convert_memory_address (Pmode, arguments);
1718 /* Create a block where the return registers can be saved. */
1719 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1721 /* Fetch the arg pointer from the ARGUMENTS block. */
1722 incoming_args = gen_reg_rtx (Pmode);
1723 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1724 if (!STACK_GROWS_DOWNWARD)
1725 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1726 incoming_args, 0, OPTAB_LIB_WIDEN);
1728 /* Push a new argument block and copy the arguments. Do not allow
1729 the (potential) memcpy call below to interfere with our stack
1730 manipulations. */
1731 do_pending_stack_adjust ();
1732 NO_DEFER_POP;
1734 /* Save the stack with nonlocal if available. */
1735 if (targetm.have_save_stack_nonlocal ())
1736 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1737 else
1738 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1740 /* Allocate a block of memory onto the stack and copy the memory
1741 arguments to the outgoing arguments address. We can pass TRUE
1742 as the 4th argument because we just saved the stack pointer
1743 and will restore it right after the call. */
1744 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1746 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1747 may have already set current_function_calls_alloca to true.
1748 current_function_calls_alloca won't be set if argsize is zero,
1749 so we have to guarantee need_drap is true here. */
1750 if (SUPPORTS_STACK_ALIGNMENT)
1751 crtl->need_drap = true;
1753 dest = virtual_outgoing_args_rtx;
1754 if (!STACK_GROWS_DOWNWARD)
1756 if (CONST_INT_P (argsize))
1757 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1758 else
1759 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1761 dest = gen_rtx_MEM (BLKmode, dest);
1762 set_mem_align (dest, PARM_BOUNDARY);
1763 src = gen_rtx_MEM (BLKmode, incoming_args);
1764 set_mem_align (src, PARM_BOUNDARY);
1765 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1767 /* Refer to the argument block. */
1768 apply_args_size ();
1769 arguments = gen_rtx_MEM (BLKmode, arguments);
1770 set_mem_align (arguments, PARM_BOUNDARY);
1772 /* Walk past the arg-pointer and structure value address. */
1773 size = GET_MODE_SIZE (Pmode);
1774 if (struct_value)
1775 size += GET_MODE_SIZE (Pmode);
1777 /* Restore each of the registers previously saved. Make USE insns
1778 for each of these registers for use in making the call. */
1779 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1780 if ((mode = apply_args_mode[regno]) != VOIDmode)
1782 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1783 if (size % align != 0)
1784 size = CEIL (size, align) * align;
1785 reg = gen_rtx_REG (mode, regno);
1786 emit_move_insn (reg, adjust_address (arguments, mode, size));
1787 use_reg (&call_fusage, reg);
1788 size += GET_MODE_SIZE (mode);
1791 /* Restore the structure value address unless this is passed as an
1792 "invisible" first argument. */
1793 size = GET_MODE_SIZE (Pmode);
1794 if (struct_value)
1796 rtx value = gen_reg_rtx (Pmode);
1797 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1798 emit_move_insn (struct_value, value);
1799 if (REG_P (struct_value))
1800 use_reg (&call_fusage, struct_value);
1801 size += GET_MODE_SIZE (Pmode);
1804 /* All arguments and registers used for the call are set up by now! */
1805 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1807 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1808 work is needed, and we don't want to load it into a register as an
1809 optimization, because prepare_call_address already did so if needed. */
1810 if (GET_CODE (function) != SYMBOL_REF)
1811 function = memory_address (FUNCTION_MODE, function);
1813 /* Generate the actual call instruction and save the return value. */
1814 if (targetm.have_untyped_call ())
1816 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1817 emit_call_insn (targetm.gen_untyped_call (mem, result,
1818 result_vector (1, result)));
1820 else if (targetm.have_call_value ())
1822 rtx valreg = 0;
1824 /* Locate the unique return register. It is not possible to
1825 express a call that sets more than one return register using
1826 call_value; use untyped_call for that. In fact, untyped_call
1827 only needs to save the return registers in the given block. */
1828 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1829 if ((mode = apply_result_mode[regno]) != VOIDmode)
1831 gcc_assert (!valreg); /* have_untyped_call required. */
1833 valreg = gen_rtx_REG (mode, regno);
1836 emit_insn (targetm.gen_call_value (valreg,
1837 gen_rtx_MEM (FUNCTION_MODE, function),
1838 const0_rtx, NULL_RTX, const0_rtx));
1840 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1842 else
1843 gcc_unreachable ();
1845 /* Find the CALL insn we just emitted, and attach the register usage
1846 information. */
1847 call_insn = last_call_insn ();
1848 add_function_usage_to (call_insn, call_fusage);
1850 /* Restore the stack. */
1851 if (targetm.have_save_stack_nonlocal ())
1852 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1853 else
1854 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1855 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1857 OK_DEFER_POP;
1859 /* Return the address of the result block. */
1860 result = copy_addr_to_reg (XEXP (result, 0));
1861 return convert_memory_address (ptr_mode, result);
1864 /* Perform an untyped return. */
1866 static void
1867 expand_builtin_return (rtx result)
1869 int size, align, regno;
1870 fixed_size_mode mode;
1871 rtx reg;
1872 rtx_insn *call_fusage = 0;
1874 result = convert_memory_address (Pmode, result);
1876 apply_result_size ();
1877 result = gen_rtx_MEM (BLKmode, result);
1879 if (targetm.have_untyped_return ())
1881 rtx vector = result_vector (0, result);
1882 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1883 emit_barrier ();
1884 return;
1887 /* Restore the return value and note that each value is used. */
1888 size = 0;
1889 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1890 if ((mode = apply_result_mode[regno]) != VOIDmode)
1892 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1893 if (size % align != 0)
1894 size = CEIL (size, align) * align;
1895 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1896 emit_move_insn (reg, adjust_address (result, mode, size));
1898 push_to_sequence (call_fusage);
1899 emit_use (reg);
1900 call_fusage = get_insns ();
1901 end_sequence ();
1902 size += GET_MODE_SIZE (mode);
1905 /* Put the USE insns before the return. */
1906 emit_insn (call_fusage);
1908 /* Return whatever values were restored by jumping directly to the end
1909 of the function. */
1910 expand_naked_return ();
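/* Illustrative use of the three builtins expanded above (a sketch, not
   part of GCC itself, where 128 is an assumed upper bound on the size
   of the pushed argument block):

     double target (int, double);

     double
     forward (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target, args, 128);
       __builtin_return (ret);
     }

   expand_builtin_apply_args captures the incoming registers,
   expand_builtin_apply replays them for the untyped call, and
   expand_builtin_return restores the callee's return registers.  */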
1913 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1915 static enum type_class
1916 type_to_class (tree type)
1918 switch (TREE_CODE (type))
1920 case VOID_TYPE: return void_type_class;
1921 case INTEGER_TYPE: return integer_type_class;
1922 case ENUMERAL_TYPE: return enumeral_type_class;
1923 case BOOLEAN_TYPE: return boolean_type_class;
1924 case POINTER_TYPE: return pointer_type_class;
1925 case REFERENCE_TYPE: return reference_type_class;
1926 case OFFSET_TYPE: return offset_type_class;
1927 case REAL_TYPE: return real_type_class;
1928 case COMPLEX_TYPE: return complex_type_class;
1929 case FUNCTION_TYPE: return function_type_class;
1930 case METHOD_TYPE: return method_type_class;
1931 case RECORD_TYPE: return record_type_class;
1932 case UNION_TYPE:
1933 case QUAL_UNION_TYPE: return union_type_class;
1934 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1935 ? string_type_class : array_type_class);
1936 case LANG_TYPE: return lang_type_class;
1937 default: return no_type_class;
1941 /* Expand a call EXP to __builtin_classify_type. */
1943 static rtx
1944 expand_builtin_classify_type (tree exp)
1946 if (call_expr_nargs (exp))
1947 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1948 return GEN_INT (no_type_class);
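/* For illustration (not from the sources): in C the builtin evaluates
   to one of the type classes above, e.g.

     __builtin_classify_type (0)            is integer_type_class
     __builtin_classify_type (0.0)          is real_type_class
     __builtin_classify_type ((char *) 0)   is pointer_type_class

   which is how tgmath.h-style dispatch macros tell argument kinds
   apart at compile time.  */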
1951 /* This helper macro, meant to be used in mathfn_built_in below, determines
1952 which among a set of builtin math functions is appropriate for a given type
1953 mode. The `F' (float) and `L' (long double) are automatically generated
1954 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1955 types, there are additional types that are considered with 'F32', 'F64',
1956 'F128', etc. suffixes. */
1957 #define CASE_MATHFN(MATHFN) \
1958 CASE_CFN_##MATHFN: \
1959 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1960 fcodel = BUILT_IN_##MATHFN##L ; break;
1961 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1962 types. */
1963 #define CASE_MATHFN_FLOATN(MATHFN) \
1964 CASE_CFN_##MATHFN: \
1965 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1966 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1967 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1968 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1969 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1970 break;
1971 /* Similar to above, but appends _R after any F/L suffix. */
1972 #define CASE_MATHFN_REENT(MATHFN) \
1973 case CFN_BUILT_IN_##MATHFN##_R: \
1974 case CFN_BUILT_IN_##MATHFN##F_R: \
1975 case CFN_BUILT_IN_##MATHFN##L_R: \
1976 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1977 fcodel = BUILT_IN_##MATHFN##L_R ; break;
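/* As an example of the macros above, CASE_MATHFN (SQRT) expands (one
   macro level) to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT, from case-cfn-macros.h, in turn covers the
   combined-function codes for sqrt and its float and long double
   variants.  */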
1979 /* Return a function equivalent to FN but operating on floating-point
1980 values of type TYPE, or END_BUILTINS if no such function exists.
1981 This is purely an operation on function codes; it does not guarantee
1982 that the target actually has an implementation of the function. */
1984 static built_in_function
1985 mathfn_built_in_2 (tree type, combined_fn fn)
1987 tree mtype;
1988 built_in_function fcode, fcodef, fcodel;
1989 built_in_function fcodef16 = END_BUILTINS;
1990 built_in_function fcodef32 = END_BUILTINS;
1991 built_in_function fcodef64 = END_BUILTINS;
1992 built_in_function fcodef128 = END_BUILTINS;
1993 built_in_function fcodef32x = END_BUILTINS;
1994 built_in_function fcodef64x = END_BUILTINS;
1995 built_in_function fcodef128x = END_BUILTINS;
1997 switch (fn)
1999 CASE_MATHFN (ACOS)
2000 CASE_MATHFN (ACOSH)
2001 CASE_MATHFN (ASIN)
2002 CASE_MATHFN (ASINH)
2003 CASE_MATHFN (ATAN)
2004 CASE_MATHFN (ATAN2)
2005 CASE_MATHFN (ATANH)
2006 CASE_MATHFN (CBRT)
2007 CASE_MATHFN_FLOATN (CEIL)
2008 CASE_MATHFN (CEXPI)
2009 CASE_MATHFN_FLOATN (COPYSIGN)
2010 CASE_MATHFN (COS)
2011 CASE_MATHFN (COSH)
2012 CASE_MATHFN (DREM)
2013 CASE_MATHFN (ERF)
2014 CASE_MATHFN (ERFC)
2015 CASE_MATHFN (EXP)
2016 CASE_MATHFN (EXP10)
2017 CASE_MATHFN (EXP2)
2018 CASE_MATHFN (EXPM1)
2019 CASE_MATHFN (FABS)
2020 CASE_MATHFN (FDIM)
2021 CASE_MATHFN_FLOATN (FLOOR)
2022 CASE_MATHFN_FLOATN (FMA)
2023 CASE_MATHFN_FLOATN (FMAX)
2024 CASE_MATHFN_FLOATN (FMIN)
2025 CASE_MATHFN (FMOD)
2026 CASE_MATHFN (FREXP)
2027 CASE_MATHFN (GAMMA)
2028 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2029 CASE_MATHFN (HUGE_VAL)
2030 CASE_MATHFN (HYPOT)
2031 CASE_MATHFN (ILOGB)
2032 CASE_MATHFN (ICEIL)
2033 CASE_MATHFN (IFLOOR)
2034 CASE_MATHFN (INF)
2035 CASE_MATHFN (IRINT)
2036 CASE_MATHFN (IROUND)
2037 CASE_MATHFN (ISINF)
2038 CASE_MATHFN (J0)
2039 CASE_MATHFN (J1)
2040 CASE_MATHFN (JN)
2041 CASE_MATHFN (LCEIL)
2042 CASE_MATHFN (LDEXP)
2043 CASE_MATHFN (LFLOOR)
2044 CASE_MATHFN (LGAMMA)
2045 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2046 CASE_MATHFN (LLCEIL)
2047 CASE_MATHFN (LLFLOOR)
2048 CASE_MATHFN (LLRINT)
2049 CASE_MATHFN (LLROUND)
2050 CASE_MATHFN (LOG)
2051 CASE_MATHFN (LOG10)
2052 CASE_MATHFN (LOG1P)
2053 CASE_MATHFN (LOG2)
2054 CASE_MATHFN (LOGB)
2055 CASE_MATHFN (LRINT)
2056 CASE_MATHFN (LROUND)
2057 CASE_MATHFN (MODF)
2058 CASE_MATHFN (NAN)
2059 CASE_MATHFN (NANS)
2060 CASE_MATHFN_FLOATN (NEARBYINT)
2061 CASE_MATHFN (NEXTAFTER)
2062 CASE_MATHFN (NEXTTOWARD)
2063 CASE_MATHFN (POW)
2064 CASE_MATHFN (POWI)
2065 CASE_MATHFN (POW10)
2066 CASE_MATHFN (REMAINDER)
2067 CASE_MATHFN (REMQUO)
2068 CASE_MATHFN_FLOATN (RINT)
2069 CASE_MATHFN_FLOATN (ROUND)
2070 CASE_MATHFN (SCALB)
2071 CASE_MATHFN (SCALBLN)
2072 CASE_MATHFN (SCALBN)
2073 CASE_MATHFN (SIGNBIT)
2074 CASE_MATHFN (SIGNIFICAND)
2075 CASE_MATHFN (SIN)
2076 CASE_MATHFN (SINCOS)
2077 CASE_MATHFN (SINH)
2078 CASE_MATHFN_FLOATN (SQRT)
2079 CASE_MATHFN (TAN)
2080 CASE_MATHFN (TANH)
2081 CASE_MATHFN (TGAMMA)
2082 CASE_MATHFN_FLOATN (TRUNC)
2083 CASE_MATHFN (Y0)
2084 CASE_MATHFN (Y1)
2085 CASE_MATHFN (YN)
2087 default:
2088 return END_BUILTINS;
2091 mtype = TYPE_MAIN_VARIANT (type);
2092 if (mtype == double_type_node)
2093 return fcode;
2094 else if (mtype == float_type_node)
2095 return fcodef;
2096 else if (mtype == long_double_type_node)
2097 return fcodel;
2098 else if (mtype == float16_type_node)
2099 return fcodef16;
2100 else if (mtype == float32_type_node)
2101 return fcodef32;
2102 else if (mtype == float64_type_node)
2103 return fcodef64;
2104 else if (mtype == float128_type_node)
2105 return fcodef128;
2106 else if (mtype == float32x_type_node)
2107 return fcodef32x;
2108 else if (mtype == float64x_type_node)
2109 return fcodef64x;
2110 else if (mtype == float128x_type_node)
2111 return fcodef128x;
2112 else
2113 return END_BUILTINS;
2116 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2117 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2118 otherwise use the explicit declaration. If we can't do the conversion,
2119 return null. */
2121 static tree
2122 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2124 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2125 if (fcode2 == END_BUILTINS)
2126 return NULL_TREE;
2128 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2129 return NULL_TREE;
2131 return builtin_decl_explicit (fcode2);
2134 /* Like mathfn_built_in_1, but always use the implicit array. */
2136 tree
2137 mathfn_built_in (tree type, combined_fn fn)
2139 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2142 /* Like mathfn_built_in_1, but take a built_in_function and
2143 always use the implicit array. */
2145 tree
2146 mathfn_built_in (tree type, enum built_in_function fn)
2148 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
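/* Typical use, a sketch: retype a math call whose argument is known
   to fit in a narrower type.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);
     if (fn)
       call = build_call_expr (fn, 1, narrowed_arg);

   FN here is the decl for sinf when it is implicitly available, and
   NARROWED_ARG stands for a hypothetical float-typed operand.  */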
2151 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2152 return its code, otherwise return IFN_LAST. Note that this function
2153 only tests whether the function is defined in internal-fn.def, not whether
2154 it is actually available on the target. */
2156 internal_fn
2157 associated_internal_fn (tree fndecl)
2159 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2160 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2161 switch (DECL_FUNCTION_CODE (fndecl))
2163 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2164 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2165 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2166 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2167 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2168 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2169 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2170 #include "internal-fn.def"
2172 CASE_FLT_FN (BUILT_IN_POW10):
2173 return IFN_EXP10;
2175 CASE_FLT_FN (BUILT_IN_DREM):
2176 return IFN_REMAINDER;
2178 CASE_FLT_FN (BUILT_IN_SCALBN):
2179 CASE_FLT_FN (BUILT_IN_SCALBLN):
2180 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2181 return IFN_LDEXP;
2182 return IFN_LAST;
2184 default:
2185 return IFN_LAST;
2189 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2190 on the current target by a call to an internal function, return the
2191 code of that internal function, otherwise return IFN_LAST. The caller
2192 is responsible for ensuring that any side-effects of the built-in
2193 call are dealt with correctly. E.g. if CALL sets errno, the caller
2194 must decide that the errno result isn't needed or make it available
2195 in some other way. */
2197 internal_fn
2198 replacement_internal_fn (gcall *call)
2200 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2202 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2203 if (ifn != IFN_LAST)
2205 tree_pair types = direct_internal_fn_types (ifn, call);
2206 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2207 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2208 return ifn;
2211 return IFN_LAST;
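/* For example (a sketch of the intended use): given a GIMPLE call

     x = __builtin_sqrt (y);

   compiled with -fno-math-errno on a target whose sqrt optab has a
   handler, this returns IFN_SQRT and the caller may rewrite the
   statement as the internal call

     x = .SQRT (y);

   The errno question is the caller's problem, as noted above.  */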
2214 /* Expand a call to the builtin trinary math functions (fma).
2215 Return NULL_RTX if a normal call should be emitted rather than expanding the
2216 function in-line. EXP is the expression that is a call to the builtin
2217 function; if convenient, the result should be placed in TARGET.
2218 SUBTARGET may be used as the target for computing one of EXP's
2219 operands. */
2221 static rtx
2222 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2224 optab builtin_optab;
2225 rtx op0, op1, op2, result;
2226 rtx_insn *insns;
2227 tree fndecl = get_callee_fndecl (exp);
2228 tree arg0, arg1, arg2;
2229 machine_mode mode;
2231 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2232 return NULL_RTX;
2234 arg0 = CALL_EXPR_ARG (exp, 0);
2235 arg1 = CALL_EXPR_ARG (exp, 1);
2236 arg2 = CALL_EXPR_ARG (exp, 2);
2238 switch (DECL_FUNCTION_CODE (fndecl))
2240 CASE_FLT_FN (BUILT_IN_FMA):
2241 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2242 builtin_optab = fma_optab; break;
2243 default:
2244 gcc_unreachable ();
2247 /* Make a suitable register to place result in. */
2248 mode = TYPE_MODE (TREE_TYPE (exp));
2250 /* Before working hard, check whether the instruction is available. */
2251 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2252 return NULL_RTX;
2254 result = gen_reg_rtx (mode);
2256 /* Always stabilize the argument list. */
2257 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2258 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2259 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2261 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2262 op1 = expand_normal (arg1);
2263 op2 = expand_normal (arg2);
2265 start_sequence ();
2267 /* Compute into RESULT.
2268 Set RESULT to wherever the result comes back. */
2269 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2270 result, 0);
2272 /* If we were unable to expand via the builtin, stop the sequence
2273 (without outputting the insns) and call to the library function
2274 with the stabilized argument list. */
2275 if (result == 0)
2277 end_sequence ();
2278 return expand_call (exp, target, target == const0_rtx);
2281 /* Output the entire sequence. */
2282 insns = get_insns ();
2283 end_sequence ();
2284 emit_insn (insns);
2286 return result;
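/* E.g. a call

     double d = __builtin_fma (a, b, c);

   expands to a single fused multiply-add insn when fma_optab has a
   handler for DFmode; otherwise the sequence is abandoned and a
   normal libcall to fma is emitted instead.  */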
2289 /* Expand a call to the builtin sin and cos math functions.
2290 Return NULL_RTX if a normal call should be emitted rather than expanding the
2291 function in-line. EXP is the expression that is a call to the builtin
2292 function; if convenient, the result should be placed in TARGET.
2293 SUBTARGET may be used as the target for computing one of EXP's
2294 operands. */
2296 static rtx
2297 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2299 optab builtin_optab;
2300 rtx op0;
2301 rtx_insn *insns;
2302 tree fndecl = get_callee_fndecl (exp);
2303 machine_mode mode;
2304 tree arg;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2309 arg = CALL_EXPR_ARG (exp, 0);
2311 switch (DECL_FUNCTION_CODE (fndecl))
2313 CASE_FLT_FN (BUILT_IN_SIN):
2314 CASE_FLT_FN (BUILT_IN_COS):
2315 builtin_optab = sincos_optab; break;
2316 default:
2317 gcc_unreachable ();
2320 /* Make a suitable register to place result in. */
2321 mode = TYPE_MODE (TREE_TYPE (exp));
2323 /* Check if the sincos insn is available; otherwise fall back
2324 to the sin or cos insn. */
2325 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2326 switch (DECL_FUNCTION_CODE (fndecl))
2328 CASE_FLT_FN (BUILT_IN_SIN):
2329 builtin_optab = sin_optab; break;
2330 CASE_FLT_FN (BUILT_IN_COS):
2331 builtin_optab = cos_optab; break;
2332 default:
2333 gcc_unreachable ();
2336 /* Before working hard, check whether the instruction is available. */
2337 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2339 rtx result = gen_reg_rtx (mode);
2341 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2342 need to expand the argument again. This way, we will not perform
2343 side-effects more than once. */
2344 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2346 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2348 start_sequence ();
2350 /* Compute into RESULT.
2351 Set RESULT to wherever the result comes back. */
2352 if (builtin_optab == sincos_optab)
2354 int ok;
2356 switch (DECL_FUNCTION_CODE (fndecl))
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2360 break;
2361 CASE_FLT_FN (BUILT_IN_COS):
2362 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2363 break;
2364 default:
2365 gcc_unreachable ();
2367 gcc_assert (ok);
2369 else
2370 result = expand_unop (mode, builtin_optab, op0, result, 0);
2372 if (result != 0)
2374 /* Output the entire sequence. */
2375 insns = get_insns ();
2376 end_sequence ();
2377 emit_insn (insns);
2378 return result;
2381 /* If we were unable to expand via the builtin, stop the sequence
2382 (without outputting the insns) and call to the library function
2383 with the stabilized argument list. */
2384 end_sequence ();
2387 return expand_call (exp, target, target == const0_rtx);
2390 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2391 return an RTL instruction code that implements the functionality.
2392 If that isn't possible or available return CODE_FOR_nothing. */
2394 static enum insn_code
2395 interclass_mathfn_icode (tree arg, tree fndecl)
2397 bool errno_set = false;
2398 optab builtin_optab = unknown_optab;
2399 machine_mode mode;
2401 switch (DECL_FUNCTION_CODE (fndecl))
2403 CASE_FLT_FN (BUILT_IN_ILOGB):
2404 errno_set = true; builtin_optab = ilogb_optab; break;
2405 CASE_FLT_FN (BUILT_IN_ISINF):
2406 builtin_optab = isinf_optab; break;
2407 case BUILT_IN_ISNORMAL:
2408 case BUILT_IN_ISFINITE:
2409 CASE_FLT_FN (BUILT_IN_FINITE):
2410 case BUILT_IN_FINITED32:
2411 case BUILT_IN_FINITED64:
2412 case BUILT_IN_FINITED128:
2413 case BUILT_IN_ISINFD32:
2414 case BUILT_IN_ISINFD64:
2415 case BUILT_IN_ISINFD128:
2416 /* These builtins have no optabs (yet). */
2417 break;
2418 default:
2419 gcc_unreachable ();
2422 /* There's no easy way to detect the case we need to set EDOM. */
2423 if (flag_errno_math && errno_set)
2424 return CODE_FOR_nothing;
2426 /* Optab mode depends on the mode of the input argument. */
2427 mode = TYPE_MODE (TREE_TYPE (arg));
2429 if (builtin_optab)
2430 return optab_handler (builtin_optab, mode);
2431 return CODE_FOR_nothing;
2434 /* Expand a call to one of the builtin math functions that operate on
2435 a floating-point argument and produce an integer result (ilogb, isinf,
2436 isnan, etc.).
2437 Return 0 if a normal call should be emitted rather than expanding the
2438 function in-line. EXP is the expression that is a call to the builtin
2439 function; if convenient, the result should be placed in TARGET. */
2441 static rtx
2442 expand_builtin_interclass_mathfn (tree exp, rtx target)
2444 enum insn_code icode = CODE_FOR_nothing;
2445 rtx op0;
2446 tree fndecl = get_callee_fndecl (exp);
2447 machine_mode mode;
2448 tree arg;
2450 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2451 return NULL_RTX;
2453 arg = CALL_EXPR_ARG (exp, 0);
2454 icode = interclass_mathfn_icode (arg, fndecl);
2455 mode = TYPE_MODE (TREE_TYPE (arg));
2457 if (icode != CODE_FOR_nothing)
2459 struct expand_operand ops[1];
2460 rtx_insn *last = get_last_insn ();
2461 tree orig_arg = arg;
2463 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2464 need to expand the argument again. This way, we will not perform
2465 side-effects more than once. */
2466 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2468 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2470 if (mode != GET_MODE (op0))
2471 op0 = convert_to_mode (mode, op0, 0);
2473 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2474 if (maybe_legitimize_operands (icode, 0, 1, ops)
2475 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2476 return ops[0].value;
2478 delete_insns_since (last);
2479 CALL_EXPR_ARG (exp, 0) = orig_arg;
2482 return NULL_RTX;
2485 /* Expand a call to the builtin sincos math function.
2486 Return NULL_RTX if a normal call should be emitted rather than expanding the
2487 function in-line. EXP is the expression that is a call to the builtin
2488 function. */
2490 static rtx
2491 expand_builtin_sincos (tree exp)
2493 rtx op0, op1, op2, target1, target2;
2494 machine_mode mode;
2495 tree arg, sinp, cosp;
2496 int result;
2497 location_t loc = EXPR_LOCATION (exp);
2498 tree alias_type, alias_off;
2500 if (!validate_arglist (exp, REAL_TYPE,
2501 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2502 return NULL_RTX;
2504 arg = CALL_EXPR_ARG (exp, 0);
2505 sinp = CALL_EXPR_ARG (exp, 1);
2506 cosp = CALL_EXPR_ARG (exp, 2);
2508 /* Make a suitable register to place result in. */
2509 mode = TYPE_MODE (TREE_TYPE (arg));
2511 /* Check if sincos insn is available, otherwise emit the call. */
2512 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2513 return NULL_RTX;
2515 target1 = gen_reg_rtx (mode);
2516 target2 = gen_reg_rtx (mode);
2518 op0 = expand_normal (arg);
2519 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2520 alias_off = build_int_cst (alias_type, 0);
2521 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2522 sinp, alias_off));
2523 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2524 cosp, alias_off));
2526 /* Compute into target1 and target2.
2527 Set TARGET to wherever the result comes back. */
2528 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2529 gcc_assert (result);
2531 /* Move target1 and target2 to the memory locations indicated
2532 by op1 and op2. */
2533 emit_move_insn (op1, target1);
2534 emit_move_insn (op2, target2);
2536 return const0_rtx;
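/* E.g. with

     double s, c;
     sincos (x, &s, &c);

   and a sincos_optab handler for DFmode, the expansion computes both
   results with one insn and stores them through the two pointers;
   without the handler NULL_RTX is returned and a normal call to
   sincos is emitted.  */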
2539 /* Expand a call to the internal cexpi builtin to the sincos math function.
2540 EXP is the expression that is a call to the builtin function; if convenient,
2541 the result should be placed in TARGET. */
2543 static rtx
2544 expand_builtin_cexpi (tree exp, rtx target)
2546 tree fndecl = get_callee_fndecl (exp);
2547 tree arg, type;
2548 machine_mode mode;
2549 rtx op0, op1, op2;
2550 location_t loc = EXPR_LOCATION (exp);
2552 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2553 return NULL_RTX;
2555 arg = CALL_EXPR_ARG (exp, 0);
2556 type = TREE_TYPE (arg);
2557 mode = TYPE_MODE (TREE_TYPE (arg));
2559 /* Try expanding via a sincos optab, falling back to emitting a libcall
2560 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2561 is only generated from sincos or cexp, or when either is available. */
2562 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2564 op1 = gen_reg_rtx (mode);
2565 op2 = gen_reg_rtx (mode);
2567 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2569 /* Compute into op1 and op2. */
2570 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2572 else if (targetm.libc_has_function (function_sincos))
2574 tree call, fn = NULL_TREE;
2575 tree top1, top2;
2576 rtx op1a, op2a;
2578 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2579 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2580 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2581 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2582 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2584 else
2585 gcc_unreachable ();
2587 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2588 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2589 op1a = copy_addr_to_reg (XEXP (op1, 0));
2590 op2a = copy_addr_to_reg (XEXP (op2, 0));
2591 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2592 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2594 /* Make sure not to fold the sincos call again. */
2595 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2596 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2597 call, 3, arg, top1, top2));
2599 else
2601 tree call, fn = NULL_TREE, narg;
2602 tree ctype = build_complex_type (type);
2604 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2605 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2606 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2607 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2610 else
2611 gcc_unreachable ();
2613 /* If we don't have a decl for cexp, create one. This is the
2614 friendliest fallback if the user calls __builtin_cexpi on a target
2615 without full C99 function support. */
2616 if (fn == NULL_TREE)
2618 tree fntype;
2619 const char *name = NULL;
2621 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2622 name = "cexpf";
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2624 name = "cexp";
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2626 name = "cexpl";
2628 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2629 fn = build_fn_decl (name, fntype);
2632 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2633 build_real (type, dconst0), arg);
2635 /* Make sure not to fold the cexp call again. */
2636 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2637 return expand_expr (build_call_nary (ctype, call, 1, narg),
2638 target, VOIDmode, EXPAND_NORMAL);
2641 /* Now build the proper return type. */
2642 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2643 make_tree (TREE_TYPE (arg), op2),
2644 make_tree (TREE_TYPE (arg), op1)),
2645 target, VOIDmode, EXPAND_NORMAL);
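/* The final fallback above relies on the identity cexpi (x) == cexp (I*x):
   the call is rebuilt, in effect, as

     cexp (COMPLEX_EXPR <0.0, x>)

   while the sincos-based paths are preferred whenever they are
   available.  */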
2648 /* Conveniently construct a function call expression. FNDECL names the
2649 function to be called, N is the number of arguments, and the "..."
2650 parameters are the argument expressions. Unlike build_call_expr,
2651 this doesn't fold the call, hence it always returns a CALL_EXPR. */
2653 static tree
2654 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2656 va_list ap;
2657 tree fntype = TREE_TYPE (fndecl);
2658 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2660 va_start (ap, n);
2661 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2662 va_end (ap);
2663 SET_EXPR_LOCATION (fn, loc);
2664 return fn;
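/* Typical use, as in the fallback paths below:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                  fallback_fndecl, 1, arg);

   yields a bare CALL_EXPR that can be handed to expand_call or
   expand_normal without the risk of it being folded straight back
   into the builtin currently being expanded.  */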
2667 /* Expand a call to one of the builtin rounding functions gcc defines
2668 as an extension (lfloor and lceil). As these are gcc extensions, we
2669 do not need to worry about setting errno to EDOM.
2670 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2671 EXP is the expression that is a call to the builtin function;
2672 if convenient, the result should be placed in TARGET. */
2674 static rtx
2675 expand_builtin_int_roundingfn (tree exp, rtx target)
2677 convert_optab builtin_optab;
2678 rtx op0, tmp;
2679 rtx_insn *insns;
2680 tree fndecl = get_callee_fndecl (exp);
2681 enum built_in_function fallback_fn;
2682 tree fallback_fndecl;
2683 machine_mode mode;
2684 tree arg;
2686 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2687 gcc_unreachable ();
2689 arg = CALL_EXPR_ARG (exp, 0);
2691 switch (DECL_FUNCTION_CODE (fndecl))
2693 CASE_FLT_FN (BUILT_IN_ICEIL):
2694 CASE_FLT_FN (BUILT_IN_LCEIL):
2695 CASE_FLT_FN (BUILT_IN_LLCEIL):
2696 builtin_optab = lceil_optab;
2697 fallback_fn = BUILT_IN_CEIL;
2698 break;
2700 CASE_FLT_FN (BUILT_IN_IFLOOR):
2701 CASE_FLT_FN (BUILT_IN_LFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2703 builtin_optab = lfloor_optab;
2704 fallback_fn = BUILT_IN_FLOOR;
2705 break;
2707 default:
2708 gcc_unreachable ();
2711 /* Make a suitable register to place result in. */
2712 mode = TYPE_MODE (TREE_TYPE (exp));
2714 target = gen_reg_rtx (mode);
2716 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2717 need to expand the argument again. This way, we will not perform
2718 side-effects more than once. */
2719 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723 start_sequence ();
2725 /* Compute into TARGET. */
2726 if (expand_sfix_optab (target, op0, builtin_optab))
2728 /* Output the entire sequence. */
2729 insns = get_insns ();
2730 end_sequence ();
2731 emit_insn (insns);
2732 return target;
2735 /* If we were unable to expand via the builtin, stop the sequence
2736 (without outputting the insns). */
2737 end_sequence ();
2739 /* Fall back to floating point rounding optab. */
2740 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742 /* For non-C99 targets we may end up without a fallback fndecl here
2743 if the user called __builtin_lfloor directly. In this case emit
2744 a call to the floor/ceil variants nevertheless. This should give
2745 the best user experience on targets without full C99 support. */
2746 if (fallback_fndecl == NULL_TREE)
2748 tree fntype;
2749 const char *name = NULL;
2751 switch (DECL_FUNCTION_CODE (fndecl))
2753 case BUILT_IN_ICEIL:
2754 case BUILT_IN_LCEIL:
2755 case BUILT_IN_LLCEIL:
2756 name = "ceil";
2757 break;
2758 case BUILT_IN_ICEILF:
2759 case BUILT_IN_LCEILF:
2760 case BUILT_IN_LLCEILF:
2761 name = "ceilf";
2762 break;
2763 case BUILT_IN_ICEILL:
2764 case BUILT_IN_LCEILL:
2765 case BUILT_IN_LLCEILL:
2766 name = "ceill";
2767 break;
2768 case BUILT_IN_IFLOOR:
2769 case BUILT_IN_LFLOOR:
2770 case BUILT_IN_LLFLOOR:
2771 name = "floor";
2772 break;
2773 case BUILT_IN_IFLOORF:
2774 case BUILT_IN_LFLOORF:
2775 case BUILT_IN_LLFLOORF:
2776 name = "floorf";
2777 break;
2778 case BUILT_IN_IFLOORL:
2779 case BUILT_IN_LFLOORL:
2780 case BUILT_IN_LLFLOORL:
2781 name = "floorl";
2782 break;
2783 default:
2784 gcc_unreachable ();
2787 fntype = build_function_type_list (TREE_TYPE (arg),
2788 TREE_TYPE (arg), NULL_TREE);
2789 fallback_fndecl = build_fn_decl (name, fntype);
2792 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794 tmp = expand_normal (exp);
2795 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797 /* Truncate the result of floating point optab to integer
2798 via expand_fix (). */
2799 target = gen_reg_rtx (mode);
2800 expand_fix (target, tmp, 0);
2802 return target;
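/* E.g. __builtin_lfloor (x) on a target without an lfloor pattern is
   lowered, in effect, to

     (long) floor (x)

   that is, a call to floor (or the fndecl fallback built above)
   followed by expand_fix to truncate the result to integer.  */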
2805 /* Expand a call to one of the builtin math functions doing integer
2806 conversion (lrint).
2807 Return 0 if a normal call should be emitted rather than expanding the
2808 function in-line. EXP is the expression that is a call to the builtin
2809 function; if convenient, the result should be placed in TARGET. */
2811 static rtx
2812 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 convert_optab builtin_optab;
2815 rtx op0;
2816 rtx_insn *insns;
2817 tree fndecl = get_callee_fndecl (exp);
2818 tree arg;
2819 machine_mode mode;
2820 enum built_in_function fallback_fn = BUILT_IN_NONE;
2822 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2823 gcc_unreachable ();
2825 arg = CALL_EXPR_ARG (exp, 0);
2827 switch (DECL_FUNCTION_CODE (fndecl))
2829 CASE_FLT_FN (BUILT_IN_IRINT):
2830 fallback_fn = BUILT_IN_LRINT;
2831 gcc_fallthrough ();
2832 CASE_FLT_FN (BUILT_IN_LRINT):
2833 CASE_FLT_FN (BUILT_IN_LLRINT):
2834 builtin_optab = lrint_optab;
2835 break;
2837 CASE_FLT_FN (BUILT_IN_IROUND):
2838 fallback_fn = BUILT_IN_LROUND;
2839 gcc_fallthrough ();
2840 CASE_FLT_FN (BUILT_IN_LROUND):
2841 CASE_FLT_FN (BUILT_IN_LLROUND):
2842 builtin_optab = lround_optab;
2843 break;
2845 default:
2846 gcc_unreachable ();
2849 /* There's no easy way to detect the case we need to set EDOM. */
2850 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2851 return NULL_RTX;
2853 /* Make a suitable register to place result in. */
2854 mode = TYPE_MODE (TREE_TYPE (exp));
2856 /* There's no easy way to detect the case we need to set EDOM. */
2857 if (!flag_errno_math)
2859 rtx result = gen_reg_rtx (mode);
2861 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2862 need to expand the argument again. This way, we will not perform
2863 side-effects more than once. */
2864 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2866 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2868 start_sequence ();
2870 if (expand_sfix_optab (result, op0, builtin_optab))
2872 /* Output the entire sequence. */
2873 insns = get_insns ();
2874 end_sequence ();
2875 emit_insn (insns);
2876 return result;
2879 /* If we were unable to expand via the builtin, stop the sequence
2880 (without outputting the insns) and call to the library function
2881 with the stabilized argument list. */
2882 end_sequence ();
2885 if (fallback_fn != BUILT_IN_NONE)
2887 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2888 targets, (int) round (x) should never be transformed into
2889 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2890 a call to lround in the hope that the target provides at least some
2891 C99 functions. This should give the best user experience on
2892 targets without full C99 support. */
2893 tree fallback_fndecl = mathfn_built_in_1
2894 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2896 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2897 fallback_fndecl, 1, arg);
2899 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2900 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2901 return convert_to_mode (mode, target, 0);
2904 return expand_call (exp, target, target == const0_rtx);
2907 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2912 static rtx
2913 expand_builtin_powi (tree exp, rtx target)
2915 tree arg0, arg1;
2916 rtx op0, op1;
2917 machine_mode mode;
2918 machine_mode mode2;
2920 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2921 return NULL_RTX;
2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
2925 mode = TYPE_MODE (TREE_TYPE (exp));
2927 /* Emit a libcall to libgcc. */
2929 /* Mode of the 2nd argument must match that of an int. */
2930 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2932 if (target == NULL_RTX)
2933 target = gen_reg_rtx (mode);
2935 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2936 if (GET_MODE (op0) != mode)
2937 op0 = convert_to_mode (mode, op0, 0);
2938 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2939 if (GET_MODE (op1) != mode2)
2940 op1 = convert_to_mode (mode2, op1, 0);
2942 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2943 target, LCT_CONST, mode,
2944 op0, mode, op1, mode2);
2946 return target;
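/* E.g.

     double p = __builtin_powi (x, n);

   becomes a libcall to __powidf2 from libgcc, with N first converted
   to the mode of an int.  (Small constant powers are typically
   open-coded by earlier gimple passes and never reach this point.)  */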
2949 /* Expand expression EXP which is a call to the strlen builtin. Return
2950 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2951 try to get the result in TARGET, if convenient. */
2953 static rtx
2954 expand_builtin_strlen (tree exp, rtx target,
2955 machine_mode target_mode)
2957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2960 struct expand_operand ops[4];
2961 rtx pat;
2962 tree len;
2963 tree src = CALL_EXPR_ARG (exp, 0);
2964 rtx src_reg;
2965 rtx_insn *before_strlen;
2966 machine_mode insn_mode;
2967 enum insn_code icode = CODE_FOR_nothing;
2968 unsigned int align;
2970 /* If the length can be computed at compile-time, return it. */
2971 len = c_strlen (src, 0);
2972 if (len)
2973 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2975 /* If the length can be computed at compile-time and is a constant
2976 integer, but there are side-effects in src, evaluate
2977 src for side-effects, then return len.
2978 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2979 can be optimized into: i++; x = 3; */
2980 len = c_strlen (src, 1);
2981 if (len && TREE_CODE (len) == INTEGER_CST)
2983 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2984 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2987 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2989 /* If SRC is not a pointer type, don't do this operation inline. */
2990 if (align == 0)
2991 return NULL_RTX;
2993 /* Bail out if we can't compute strlen in the right mode. */
2994 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2996 icode = optab_handler (strlen_optab, insn_mode);
2997 if (icode != CODE_FOR_nothing)
2998 break;
3000 if (insn_mode == VOIDmode)
3001 return NULL_RTX;
3003 /* Make a place to hold the source address. We will not expand
3004 the actual source until we are sure that the expansion will
3005 not fail -- there are trees that cannot be expanded twice. */
3006 src_reg = gen_reg_rtx (Pmode);
3008 /* Mark the beginning of the strlen sequence so we can emit the
3009 source operand later. */
3010 before_strlen = get_last_insn ();
3012 create_output_operand (&ops[0], target, insn_mode);
3013 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3014 create_integer_operand (&ops[2], 0);
3015 create_integer_operand (&ops[3], align);
3016 if (!maybe_expand_insn (icode, 4, ops))
3017 return NULL_RTX;
3019 /* Check to see if the argument was declared attribute nonstring
3020 and if so, issue a warning since at this point it's not known
3021 to be nul-terminated. */
3022 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3024 /* Now that we are assured of success, expand the source. */
3025 start_sequence ();
3026 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3027 if (pat != src_reg)
3029 #ifdef POINTERS_EXTEND_UNSIGNED
3030 if (GET_MODE (pat) != Pmode)
3031 pat = convert_to_mode (Pmode, pat,
3032 POINTERS_EXTEND_UNSIGNED);
3033 #endif
3034 emit_move_insn (src_reg, pat);
3036 pat = get_insns ();
3037 end_sequence ();
3039 if (before_strlen)
3040 emit_insn_after (pat, before_strlen);
3041 else
3042 emit_insn_before (pat, get_insns ());
3044 /* Return the value in the proper mode for this function. */
3045 if (GET_MODE (ops[0].value) == target_mode)
3046 target = ops[0].value;
3047 else if (target != 0)
3048 convert_move (target, ops[0].value, 0);
3049 else
3050 target = convert_to_mode (target_mode, ops[0].value, 0);
3052 return target;
3055 /* Expand call EXP to the strnlen built-in, returning the result
3056 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3058 static rtx
3059 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3061 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3062 return NULL_RTX;
3064 tree src = CALL_EXPR_ARG (exp, 0);
3065 tree bound = CALL_EXPR_ARG (exp, 1);
3067 if (!bound)
3068 return NULL_RTX;
3070 location_t loc = UNKNOWN_LOCATION;
3071 if (EXPR_HAS_LOCATION (exp))
3072 loc = EXPR_LOCATION (exp);
3074 tree maxobjsize = max_object_size ();
3075 tree func = get_callee_fndecl (exp);
3077 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3078 so these conversions aren't necessary. */
3079 c_strlen_data data;
3080 memset (&data, 0, sizeof (c_strlen_data));
3081 tree len = c_strlen (src, 0, &data, 1);
3082 if (len)
3083 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3085 if (TREE_CODE (bound) == INTEGER_CST)
3087 if (!TREE_NO_WARNING (exp)
3088 && tree_int_cst_lt (maxobjsize, bound)
3089 && warning_at (loc, OPT_Wstringop_overflow_,
3090 "%K%qD specified bound %E "
3091 "exceeds maximum object size %E",
3092 exp, func, bound, maxobjsize))
3093 TREE_NO_WARNING (exp) = true;
3095 bool exact = true;
3096 if (!len || TREE_CODE (len) != INTEGER_CST)
3098 /* Clear EXACT if LEN may be less than SRC suggests,
3099 such as in
3100 strnlen (&a[i], sizeof a)
3101 where the value of i is unknown. Unless i's value is
3102 zero, the call is unsafe because the bound is greater. */
3103 data.decl = unterminated_array (src, &len, &exact);
3104 if (!data.decl)
3105 return NULL_RTX;
3108 if (data.decl
3109 && !TREE_NO_WARNING (exp)
3110 && ((tree_int_cst_lt (len, bound))
3111 || !exact))
3113 location_t warnloc
3114 = expansion_point_location_if_in_system_header (loc);
3116 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3117 exact
3118 ? G_("%K%qD specified bound %E exceeds the size %E "
3119 "of unterminated array")
3120 : G_("%K%qD specified bound %E may exceed the size "
3121 "of at most %E of unterminated array"),
3122 exp, func, bound, len))
3124 inform (DECL_SOURCE_LOCATION (data.decl),
3125 "referenced argument declared here");
3126 TREE_NO_WARNING (exp) = true;
3127 return NULL_RTX;
3131 if (!len)
3132 return NULL_RTX;
3134 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3135 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3138 if (TREE_CODE (bound) != SSA_NAME)
3139 return NULL_RTX;
3141 wide_int min, max;
3142 enum value_range_kind rng = get_range_info (bound, &min, &max);
3143 if (rng != VR_RANGE)
3144 return NULL_RTX;
3146 if (!TREE_NO_WARNING (exp)
3147 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3148 && warning_at (loc, OPT_Wstringop_overflow_,
3149 "%K%qD specified bound [%wu, %wu] "
3150 "exceeds maximum object size %E",
3151 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3152 TREE_NO_WARNING (exp) = true;
3154 bool exact = true;
3155 if (!len || TREE_CODE (len) != INTEGER_CST)
3157 data.decl = unterminated_array (src, &len, &exact);
3158 if (!data.decl)
3159 return NULL_RTX;
3162 if (data.decl
3163 && !TREE_NO_WARNING (exp)
3164 && (wi::ltu_p (wi::to_wide (len), min)
3165 || !exact))
3167 location_t warnloc
3168 = expansion_point_location_if_in_system_header (loc);
3170 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3171 exact
3172 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3173 "the size %E of unterminated array")
3174 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3175 "the size of at most %E of unterminated array"),
3176 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3178 inform (DECL_SOURCE_LOCATION (data.decl),
3179 "referenced argument declared here");
3180 TREE_NO_WARNING (exp) = true;
3184 if (data.decl)
3185 return NULL_RTX;
3187 if (wi::gtu_p (min, wi::to_wide (len)))
3188 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3190 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3191 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
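/* Example of a diagnostic issued above, a sketch:

     char a[4] = "abcd";   // no terminating nul
     size_t n = strnlen (a, 8);

   warns that the specified bound 8 exceeds the size 4 of the
   unterminated array, and the expansion is abandoned in favor of a
   library call.  */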
3194 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3195 bytes from constant string DATA + OFFSET and return it as target
3196 constant. */
3198 static rtx
3199 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3200 scalar_int_mode mode)
3202 const char *str = (const char *) data;
3204 gcc_assert (offset >= 0
3205 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3206 <= strlen (str) + 1));
3208 return c_readstr (str + offset, mode);
3211 /* LEN specifies the length of the block for a memcpy/memset operation.
3212 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3213 In some cases we can make a very likely guess about the maximum size,
3214 which we then store into PROBABLE_MAX_SIZE. */
3216 static void
3217 determine_block_size (tree len, rtx len_rtx,
3218 unsigned HOST_WIDE_INT *min_size,
3219 unsigned HOST_WIDE_INT *max_size,
3220 unsigned HOST_WIDE_INT *probable_max_size)
3222 if (CONST_INT_P (len_rtx))
3224 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3225 return;
3227 else
3229 wide_int min, max;
3230 enum value_range_kind range_type = VR_UNDEFINED;
3232 /* Determine bounds from the type. */
3233 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3234 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3235 else
3236 *min_size = 0;
3237 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3238 *probable_max_size = *max_size
3239 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3240 else
3241 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3243 if (TREE_CODE (len) == SSA_NAME)
3244 range_type = get_range_info (len, &min, &max);
3245 if (range_type == VR_RANGE)
3247 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3248 *min_size = min.to_uhwi ();
3249 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3250 *probable_max_size = *max_size = max.to_uhwi ();
3252 else if (range_type == VR_ANTI_RANGE)
3254 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3255 if (min == 0)
3257 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3258 *min_size = max.to_uhwi () + 1;
3260 /* Code like
3262 int n;
3263 if (n < 100)
3264 memcpy (a, b, n)
3266 produces an anti-range allowing negative values of N. We can
3267 still use that information and guess that N is not negative. */
3269 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3270 *probable_max_size = min.to_uhwi () - 1;
3273 gcc_checking_assert (*max_size <=
3274 (unsigned HOST_WIDE_INT)
3275 GET_MODE_MASK (GET_MODE (len_rtx)));
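/* E.g. after

     n &= 31;
     memcpy (a, b, n);

   LEN has the value range [0, 31], so *MAX_SIZE and *PROBABLE_MAX_SIZE
   become 31 and the expander can pick an inline copy strategy suited
   to small blocks.  */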
3278 /* Try to verify that the sizes and lengths of the arguments to a string
3279 manipulation function given by EXP are within valid bounds and that
3280 the operation does not lead to buffer overflow or read past the end.
3281 Arguments other than EXP may be null. When non-null, the arguments
3282 have the following meaning:
3283 DST is the destination of a copy call or NULL otherwise.
3284 SRC is the source of a copy call or NULL otherwise.
3285 DSTWRITE is the number of bytes written into the destination obtained
3286 from the user-supplied size argument to the function (such as in
3287 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3288 MAXREAD is the user-supplied bound on the length of the source sequence
3289 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3290 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3291 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3292 expression EXP is a string function call (as opposed to a memory call
3293 like memcpy). As an exception, SRCSTR can also be an integer denoting
3294 the precomputed size of the source string or object (for functions like
3295 memcpy).
3296 DSTSIZE is the size of the destination object specified by the last
3297 argument to the _chk builtins, typically resulting from the expansion
3298 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3299 DSTSIZE)).
3301 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3302 SIZE_MAX.
3304 If the call is successfully verified as safe, return true; otherwise
3305 return false. */
3307 static bool
3308 check_access (tree exp, tree, tree, tree dstwrite,
3309 tree maxread, tree srcstr, tree dstsize)
3311 int opt = OPT_Wstringop_overflow_;
3313 /* The size of the largest object is half the address space, or
3314 PTRDIFF_MAX. (This is way too permissive.) */
3315 tree maxobjsize = max_object_size ();
3317 /* Either the length of the source string for string functions or
3318 the size of the source object for raw memory functions. */
3319 tree slen = NULL_TREE;
3321 tree range[2] = { NULL_TREE, NULL_TREE };
3323 /* Set to true when the exact number of bytes written by a string
3324 function like strcpy is not known and the only thing that is
3325 known is that it must be at least one (for the terminating nul). */
3326 bool at_least_one = false;
3327 if (srcstr)
3329 /* SRCSTR is normally a pointer to a string but as a special case
3330 it can be an integer denoting the length of a string. */
3331 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3333 /* Try to determine the range of lengths the source string
3334 refers to. If it can be determined and is less than
3335 the upper bound given by MAXREAD add one to it for
3336 the terminating nul. Otherwise, set it to one for
3337 the same reason, or to MAXREAD as appropriate. */
3338 get_range_strlen (srcstr, range);
3339 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3341 if (maxread && tree_int_cst_le (maxread, range[0]))
3342 range[0] = range[1] = maxread;
3343 else
3344 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3345 range[0], size_one_node);
3347 if (maxread && tree_int_cst_le (maxread, range[1]))
3348 range[1] = maxread;
3349 else if (!integer_all_onesp (range[1]))
3350 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3351 range[1], size_one_node);
3353 slen = range[0];
3355 else
3357 at_least_one = true;
3358 slen = size_one_node;
3361 else
3362 slen = srcstr;
3365 if (!dstwrite && !maxread)
3367 /* When the only available piece of data is the object size
3368 there is nothing to do. */
3369 if (!slen)
3370 return true;
3372 /* Otherwise, when the length of the source sequence is known
3373 (as with strlen), set DSTWRITE to it. */
3374 if (!range[0])
3375 dstwrite = slen;
3378 if (!dstsize)
3379 dstsize = maxobjsize;
3381 if (dstwrite)
3382 get_size_range (dstwrite, range);
3384 tree func = get_callee_fndecl (exp);
3386 /* First check the number of bytes to be written against the maximum
3387 object size. */
3388 if (range[0]
3389 && TREE_CODE (range[0]) == INTEGER_CST
3390 && tree_int_cst_lt (maxobjsize, range[0]))
3392 if (TREE_NO_WARNING (exp))
3393 return false;
3395 location_t loc = tree_nonartificial_location (exp);
3396 loc = expansion_point_location_if_in_system_header (loc);
3398 bool warned;
3399 if (range[0] == range[1])
3400 warned = warning_at (loc, opt,
3401 "%K%qD specified size %E "
3402 "exceeds maximum object size %E",
3403 exp, func, range[0], maxobjsize);
3404 else
3405 warned = warning_at (loc, opt,
3406 "%K%qD specified size between %E and %E "
3407 "exceeds maximum object size %E",
3408 exp, func,
3409 range[0], range[1], maxobjsize);
3410 if (warned)
3411 TREE_NO_WARNING (exp) = true;
3413 return false;
3416 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3417 constant, and in range of unsigned HOST_WIDE_INT. */
3418 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3420 /* Next check the number of bytes to be written against the destination
3421 object size. */
3422 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3424 if (range[0]
3425 && TREE_CODE (range[0]) == INTEGER_CST
3426 && ((tree_fits_uhwi_p (dstsize)
3427 && tree_int_cst_lt (dstsize, range[0]))
3428 || (dstwrite
3429 && tree_fits_uhwi_p (dstwrite)
3430 && tree_int_cst_lt (dstwrite, range[0]))))
3432 if (TREE_NO_WARNING (exp))
3433 return false;
3435 location_t loc = tree_nonartificial_location (exp);
3436 loc = expansion_point_location_if_in_system_header (loc);
3438 if (dstwrite == slen && at_least_one)
3440 /* This is a call to strcpy with a destination of 0 size
3441 and a source of unknown length. The call will write
3442 at least one byte past the end of the destination. */
3443 warning_at (loc, opt,
3444 "%K%qD writing %E or more bytes into a region "
3445 "of size %E overflows the destination",
3446 exp, func, range[0], dstsize);
3448 else if (tree_int_cst_equal (range[0], range[1]))
3449 warning_n (loc, opt, tree_to_uhwi (range[0]),
3450 "%K%qD writing %E byte into a region "
3451 "of size %E overflows the destination",
3452 "%K%qD writing %E bytes into a region "
3453 "of size %E overflows the destination",
3454 exp, func, range[0], dstsize);
3455 else if (tree_int_cst_sign_bit (range[1]))
3457 /* Avoid printing the upper bound if it's invalid. */
3458 warning_at (loc, opt,
3459 "%K%qD writing %E or more bytes into a region "
3460 "of size %E overflows the destination",
3461 exp, func, range[0], dstsize);
3463 else
3464 warning_at (loc, opt,
3465 "%K%qD writing between %E and %E bytes into "
3466 "a region of size %E overflows the destination",
3467 exp, func, range[0], range[1],
3468 dstsize);
3470 /* Return error when an overflow has been detected. */
3471 return false;
3475 /* Check the maximum length of the source sequence against the size
3476 of the destination object if known, or against the maximum size
3477 of an object. */
3478 if (maxread)
3480 get_size_range (maxread, range);
3482 /* Use the lower end for MAXREAD from now on. */
3483 if (range[0])
3484 maxread = range[0];
3486 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3488 location_t loc = tree_nonartificial_location (exp);
3489 loc = expansion_point_location_if_in_system_header (loc);
3491 if (tree_int_cst_lt (maxobjsize, range[0]))
3493 if (TREE_NO_WARNING (exp))
3494 return false;
3496 /* Warn about crazy big sizes first since that's more
3497 likely to be meaningful than saying that the bound
3498 is greater than the object size if both are big. */
3499 if (range[0] == range[1])
3500 warning_at (loc, opt,
3501 "%K%qD specified bound %E "
3502 "exceeds maximum object size %E",
3503 exp, func,
3504 range[0], maxobjsize);
3505 else
3506 warning_at (loc, opt,
3507 "%K%qD specified bound between %E and %E "
3508 "exceeds maximum object size %E",
3509 exp, func,
3510 range[0], range[1], maxobjsize);
3512 return false;
3515 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3517 if (TREE_NO_WARNING (exp))
3518 return false;
3520 if (tree_int_cst_equal (range[0], range[1]))
3521 warning_at (loc, opt,
3522 "%K%qD specified bound %E "
3523 "exceeds destination size %E",
3524 exp, func,
3525 range[0], dstsize);
3526 else
3527 warning_at (loc, opt,
3528 "%K%qD specified bound between %E and %E "
3529 "exceeds destination size %E",
3530 exp, func,
3531 range[0], range[1], dstsize);
3532 return false;
3537 /* Check for reading past the end of SRC. */
3538 if (slen
3539 && slen == srcstr
3540 && dstwrite && range[0]
3541 && tree_int_cst_lt (slen, range[0]))
3543 if (TREE_NO_WARNING (exp))
3544 return false;
3546 location_t loc = tree_nonartificial_location (exp);
3548 if (tree_int_cst_equal (range[0], range[1]))
3549 warning_n (loc, opt, tree_to_uhwi (range[0]),
3550 "%K%qD reading %E byte from a region of size %E",
3551 "%K%qD reading %E bytes from a region of size %E",
3552 exp, func, range[0], slen);
3553 else if (tree_int_cst_sign_bit (range[1]))
3555 /* Avoid printing the upper bound if it's invalid. */
3556 warning_at (loc, opt,
3557 "%K%qD reading %E or more bytes from a region "
3558 "of size %E",
3559 exp, func, range[0], slen);
3561 else
3562 warning_at (loc, opt,
3563 "%K%qD reading between %E and %E bytes from a region "
3564 "of size %E",
3565 exp, func, range[0], range[1], slen);
3566 return false;
3569 return true;
3572 /* Helper to compute the size of the object referenced by the DEST
3573 expression which must have pointer type, using Object Size type
3574 OSTYPE (only the least significant 2 bits are used). Return
3575 an estimate of the size of the object if successful or NULL when
3576 the size cannot be determined. When the referenced object involves
3577 a non-constant offset in some range the returned value represents
3578 the largest size given the smallest non-negative offset in the
3579 range. The function is intended for diagnostics and should not
3580 be used to influence code generation or optimization. */
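/* For example, given a declaration such as

     char a[8];
     char *p = a + 2;

   compute_objsize (p, 1) would yield 6, the space remaining between
   the offset and the end of A (an illustrative sketch, not from the
   original sources). */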
3582 tree
3583 compute_objsize (tree dest, int ostype)
3585 unsigned HOST_WIDE_INT size;
3587 /* Only the two least significant bits are meaningful. */
3588 ostype &= 3;
3590 if (compute_builtin_object_size (dest, ostype, &size))
3591 return build_int_cst (sizetype, size);
3593 if (TREE_CODE (dest) == SSA_NAME)
3595 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3596 if (!is_gimple_assign (stmt))
3597 return NULL_TREE;
3599 dest = gimple_assign_rhs1 (stmt);
3601 tree_code code = gimple_assign_rhs_code (stmt);
3602 if (code == POINTER_PLUS_EXPR)
3604 /* compute_builtin_object_size fails for addresses with
3605 non-constant offsets. Try to determine the range of
3606 such an offset here and use it to adjust the constant
3607 size. */
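/* E.g., for a pointer P = A + I into an 8-byte array A, with I known
   to be in the range [2, 5], the most optimistic estimate computed
   below is 8 - 2 = 6 bytes (an illustrative example). */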
3608 tree off = gimple_assign_rhs2 (stmt);
3609 if (TREE_CODE (off) == INTEGER_CST)
3611 if (tree size = compute_objsize (dest, ostype))
3613 wide_int wioff = wi::to_wide (off);
3614 wide_int wisiz = wi::to_wide (size);
3616 /* Ignore negative offsets for now. For others,
3617 use the lower bound as the most optimistic
3618 estimate of the (remaining) size. */
3619 if (wi::sign_mask (wioff))
3621 else if (wi::ltu_p (wioff, wisiz))
3622 return wide_int_to_tree (TREE_TYPE (size),
3623 wi::sub (wisiz, wioff));
3624 else
3625 return size_zero_node;
3628 else if (TREE_CODE (off) == SSA_NAME
3629 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3631 wide_int min, max;
3632 enum value_range_kind rng = get_range_info (off, &min, &max);
3634 if (rng == VR_RANGE)
3636 if (tree size = compute_objsize (dest, ostype))
3638 wide_int wisiz = wi::to_wide (size);
3640 /* Ignore negative offsets for now. For others,
3641 use the lower bound as the most optimistic
3642 estimate of the (remaining) size. */
3643 if (wi::sign_mask (min))
3645 else if (wi::ltu_p (min, wisiz))
3646 return wide_int_to_tree (TREE_TYPE (size),
3647 wi::sub (wisiz, min));
3648 else
3649 return size_zero_node;
3654 else if (code != ADDR_EXPR)
3655 return NULL_TREE;
3658 /* Unless computing the largest size (for memcpy and other raw memory
3659 functions), try to determine the size of the object from its type. */
3660 if (!ostype)
3661 return NULL_TREE;
3663 if (TREE_CODE (dest) != ADDR_EXPR)
3664 return NULL_TREE;
3666 tree type = TREE_TYPE (dest);
3667 if (TREE_CODE (type) == POINTER_TYPE)
3668 type = TREE_TYPE (type);
3670 type = TYPE_MAIN_VARIANT (type);
3672 if (TREE_CODE (type) == ARRAY_TYPE
3673 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3675 /* Return the constant size unless it's zero (that's a zero-length
3676 array likely at the end of a struct). */
3677 tree size = TYPE_SIZE_UNIT (type);
3678 if (size && TREE_CODE (size) == INTEGER_CST
3679 && !integer_zerop (size))
3680 return size;
3683 return NULL_TREE;
3686 /* Helper to determine and check the sizes of the source and the destination
3687 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3688 call expression, DEST is the destination argument, SRC is the source
3689 argument or null, and LEN is the number of bytes. Use Object Size type-0
3690 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3691 (no overflow or invalid sizes), false otherwise. */
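/* For instance, an illustrative call such as

     char d[4];
     memset (d, 0, 8);

   reaches this helper with SIZE equal to 8 while the computed DSTSIZE
   is 4, so check_access diagnoses the overflow and false is
   returned. */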
3693 static bool
3694 check_memop_access (tree exp, tree dest, tree src, tree size)
3696 /* For functions like memset and memcpy that operate on raw memory
3697 try to determine the size of the largest source and destination
3698 object using type-0 Object Size regardless of the object size
3699 type specified by the option. */
3700 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3701 tree dstsize = compute_objsize (dest, 0);
3703 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3704 srcsize, dstsize);
3707 /* Validate memchr arguments without performing any expansion.
3708 Return NULL_RTX. */
3710 static rtx
3711 expand_builtin_memchr (tree exp, rtx)
3713 if (!validate_arglist (exp,
3714 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3715 return NULL_RTX;
3717 tree arg1 = CALL_EXPR_ARG (exp, 0);
3718 tree len = CALL_EXPR_ARG (exp, 2);
3720 /* Diagnose calls where the specified length exceeds the size
3721 of the object. */
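/* E.g., an illustrative call like

     char a[4];
     memchr (a, 'x', 8);

   is diagnosed as reading 8 bytes from a region of size 4. */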
3722 if (warn_stringop_overflow)
3724 tree size = compute_objsize (arg1, 0);
3725 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3726 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3729 return NULL_RTX;
3732 /* Expand a call EXP to the memcpy builtin.
3733 Return NULL_RTX if we failed; the caller should emit a normal call,
3734 otherwise try to get the result in TARGET, if convenient (and in
3735 mode MODE if that's convenient). */
3737 static rtx
3738 expand_builtin_memcpy (tree exp, rtx target)
3740 if (!validate_arglist (exp,
3741 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3742 return NULL_RTX;
3744 tree dest = CALL_EXPR_ARG (exp, 0);
3745 tree src = CALL_EXPR_ARG (exp, 1);
3746 tree len = CALL_EXPR_ARG (exp, 2);
3748 check_memop_access (exp, dest, src, len);
3750 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3751 /*endp=*/ 0);
3754 /* Check a call EXP to the memmove built-in for validity.
3755 Return NULL_RTX on both success and failure. */
3757 static rtx
3758 expand_builtin_memmove (tree exp, rtx)
3760 if (!validate_arglist (exp,
3761 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3762 return NULL_RTX;
3764 tree dest = CALL_EXPR_ARG (exp, 0);
3765 tree src = CALL_EXPR_ARG (exp, 1);
3766 tree len = CALL_EXPR_ARG (exp, 2);
3768 check_memop_access (exp, dest, src, len);
3770 return NULL_RTX;
3773 /* Expand a call EXP to the mempcpy builtin.
3774 Return NULL_RTX if we failed; the caller should emit a normal call,
3775 otherwise try to get the result in TARGET, if convenient (and in
3776 mode MODE if that's convenient). If ENDP is 0 return the
3777 destination pointer, if ENDP is 1 return the end pointer ala
3778 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3779 stpcpy. */
3781 static rtx
3782 expand_builtin_mempcpy (tree exp, rtx target)
3784 if (!validate_arglist (exp,
3785 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3786 return NULL_RTX;
3788 tree dest = CALL_EXPR_ARG (exp, 0);
3789 tree src = CALL_EXPR_ARG (exp, 1);
3790 tree len = CALL_EXPR_ARG (exp, 2);
3792 /* Policy does not generally allow using compute_objsize (which
3793 is used internally by check_memop_access) to change code generation
3794 or drive optimization decisions.
3796 In this instance it is safe because the code we generate has
3797 the same semantics regardless of the return value of
3798 check_memop_access. Exactly the same amount of data is copied
3799 and the return value is exactly the same in both cases.
3801 Furthermore, check_memop_access always uses mode 0 for the call to
3802 compute_objsize, so the imprecise nature of compute_objsize is
3803 avoided. */
3805 /* Avoid expanding mempcpy into memcpy when the call is determined
3806 to overflow the buffer. This also prevents the same overflow
3807 from being diagnosed again when expanding memcpy. */
3808 if (!check_memop_access (exp, dest, src, len))
3809 return NULL_RTX;
3811 return expand_builtin_mempcpy_args (dest, src, len,
3812 target, exp, /*endp=*/ 1);
3815 /* Helper function to do the actual work for expand of memory copy family
3816 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
3817 of memory from SRC to DEST and assign the result to TARGET if convenient.
3818 If ENDP is 0 return the
3819 destination pointer, if ENDP is 1 return the end pointer ala
3820 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3821 stpcpy. */
3823 static rtx
3824 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3825 rtx target, tree exp, int endp)
3827 const char *src_str;
3828 unsigned int src_align = get_pointer_alignment (src);
3829 unsigned int dest_align = get_pointer_alignment (dest);
3830 rtx dest_mem, src_mem, dest_addr, len_rtx;
3831 HOST_WIDE_INT expected_size = -1;
3832 unsigned int expected_align = 0;
3833 unsigned HOST_WIDE_INT min_size;
3834 unsigned HOST_WIDE_INT max_size;
3835 unsigned HOST_WIDE_INT probable_max_size;
3837 /* If DEST is not a pointer type, call the normal function. */
3838 if (dest_align == 0)
3839 return NULL_RTX;
3841 /* If SRC is not a pointer type, don't do this
3842 operation in-line. */
3843 if (src_align == 0)
3844 return NULL_RTX;
3846 if (currently_expanding_gimple_stmt)
3847 stringop_block_profile (currently_expanding_gimple_stmt,
3848 &expected_align, &expected_size);
3850 if (expected_align < dest_align)
3851 expected_align = dest_align;
3852 dest_mem = get_memory_rtx (dest, len);
3853 set_mem_align (dest_mem, dest_align);
3854 len_rtx = expand_normal (len);
3855 determine_block_size (len, len_rtx, &min_size, &max_size,
3856 &probable_max_size);
3857 src_str = c_getstr (src);
3859 /* If SRC is a string constant and block move would be done
3860 by pieces, we can avoid loading the string from memory
3861 and simply store the computed constants. */
3862 if (src_str
3863 && CONST_INT_P (len_rtx)
3864 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3865 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3866 CONST_CAST (char *, src_str),
3867 dest_align, false))
3869 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3870 builtin_memcpy_read_str,
3871 CONST_CAST (char *, src_str),
3872 dest_align, false, endp);
3873 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3874 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3875 return dest_mem;
3878 src_mem = get_memory_rtx (src, len);
3879 set_mem_align (src_mem, src_align);
3881 /* Copy word part most expediently. */
3882 enum block_op_methods method = BLOCK_OP_NORMAL;
3883 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3884 method = BLOCK_OP_TAILCALL;
3885 if (endp == 1 && target != const0_rtx)
3886 method = BLOCK_OP_NO_LIBCALL_RET;
3887 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3888 expected_align, expected_size,
3889 min_size, max_size, probable_max_size);
3890 if (dest_addr == pc_rtx)
3891 return NULL_RTX;
3893 if (dest_addr == 0)
3895 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3896 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3899 if (endp && target != const0_rtx)
3901 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3902 /* stpcpy returns a pointer to the last byte. */
3903 if (endp == 2)
3904 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3907 return dest_addr;
3910 static rtx
3911 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3912 rtx target, tree orig_exp, int endp)
3914 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3915 endp);
3918 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3919 we failed; the caller should emit a normal call, otherwise try to
3920 get the result in TARGET, if convenient. If ENDP is 0 return the
3921 destination pointer, if ENDP is 1 return the end pointer ala
3922 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3923 stpcpy. */
3925 static rtx
3926 expand_movstr (tree dest, tree src, rtx target, int endp)
3928 struct expand_operand ops[3];
3929 rtx dest_mem;
3930 rtx src_mem;
3932 if (!targetm.have_movstr ())
3933 return NULL_RTX;
3935 dest_mem = get_memory_rtx (dest, NULL);
3936 src_mem = get_memory_rtx (src, NULL);
3937 if (!endp)
3939 target = force_reg (Pmode, XEXP (dest_mem, 0));
3940 dest_mem = replace_equiv_address (dest_mem, target);
3943 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3944 create_fixed_operand (&ops[1], dest_mem);
3945 create_fixed_operand (&ops[2], src_mem);
3946 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3947 return NULL_RTX;
3949 if (endp && target != const0_rtx)
3951 target = ops[0].value;
3952 /* movstr is supposed to set end to the address of the NUL
3953 terminator. If the caller requested a mempcpy-like return value,
3954 adjust it. */
3955 if (endp == 1)
3957 rtx tem = plus_constant (GET_MODE (target),
3958 gen_lowpart (GET_MODE (target), target), 1);
3959 emit_move_insn (target, force_operand (tem, NULL_RTX));
3962 return target;
3965 /* Do some very basic size validation of a call to the strcat builtin
3966 given by EXP. Return NULL_RTX to have the built-in expand to a call
3967 to the library function. */
3969 static rtx
3970 expand_builtin_strcat (tree exp, rtx)
3972 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3973 || !warn_stringop_overflow)
3974 return NULL_RTX;
3976 tree dest = CALL_EXPR_ARG (exp, 0);
3977 tree src = CALL_EXPR_ARG (exp, 1);
3979 /* There is no way here to determine the length of the string in
3980 the destination to which the SRC string is being appended, so
3981 just diagnose cases when the source string is longer than
3982 the destination object. */
3984 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3986 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3987 destsize);
3989 return NULL_RTX;
3992 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3993 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3994 try to get the result in TARGET, if convenient (and in mode MODE if that's
3995 convenient). */
3997 static rtx
3998 expand_builtin_strcpy (tree exp, rtx target)
4000 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4001 return NULL_RTX;
4003 tree dest = CALL_EXPR_ARG (exp, 0);
4004 tree src = CALL_EXPR_ARG (exp, 1);
4006 if (warn_stringop_overflow)
4008 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4009 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4010 src, destsize);
4013 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4015 /* Check to see if the argument was declared attribute nonstring
4016 and if so, issue a warning since at this point it's not known
4017 to be nul-terminated. */
4018 tree fndecl = get_callee_fndecl (exp);
4019 maybe_warn_nonstring_arg (fndecl, exp);
4020 return ret;
4023 return NULL_RTX;
4026 /* Helper function to do the actual work for expand_builtin_strcpy. The
4027 arguments to the builtin_strcpy call DEST and SRC are broken out
4028 so that this can also be called without constructing an actual CALL_EXPR.
4029 The other arguments and return value are the same as for
4030 expand_builtin_strcpy. */
4032 static rtx
4033 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4035 /* Detect strcpy calls with unterminated arrays. */
4036 if (tree nonstr = unterminated_array (src))
4038 /* NONSTR refers to the non-nul terminated constant array. */
4039 if (!TREE_NO_WARNING (exp))
4040 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4041 return NULL_RTX;
4044 return expand_movstr (dest, src, target, /*endp=*/0);
4047 /* Expand a call EXP to the stpcpy builtin.
4048 Return NULL_RTX if we failed; the caller should emit a normal call,
4049 otherwise try to get the result in TARGET, if convenient (and in
4050 mode MODE if that's convenient). */
4052 static rtx
4053 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4055 tree dst, src;
4056 location_t loc = EXPR_LOCATION (exp);
4058 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4059 return NULL_RTX;
4061 dst = CALL_EXPR_ARG (exp, 0);
4062 src = CALL_EXPR_ARG (exp, 1);
4064 if (warn_stringop_overflow)
4066 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4067 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4068 src, destsize);
4071 /* If the return value is ignored, transform stpcpy into strcpy. */
4072 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4074 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4075 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4076 return expand_expr (result, target, mode, EXPAND_NORMAL);
4078 else
4080 tree len, lenp1;
4081 rtx ret;
4083 /* Ensure we get an actual string whose length can be evaluated at
4084 compile-time, not an expression containing a string. This is
4085 because the latter will potentially produce pessimized code
4086 when used to produce the return value. */
4087 c_strlen_data data;
4088 memset (&data, 0, sizeof (c_strlen_data));
4089 if (!c_getstr (src, NULL)
4090 || !(len = c_strlen (src, 0, &data, 1)))
4091 return expand_movstr (dst, src, target, /*endp=*/2);
4093 if (data.decl && !TREE_NO_WARNING (exp))
4094 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, data.decl);
4096 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4097 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4098 target, exp, /*endp=*/2);
4100 if (ret)
4101 return ret;
4103 if (TREE_CODE (len) == INTEGER_CST)
4105 rtx len_rtx = expand_normal (len);
4107 if (CONST_INT_P (len_rtx))
4109 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4111 if (ret)
4113 if (! target)
4115 if (mode != VOIDmode)
4116 target = gen_reg_rtx (mode);
4117 else
4118 target = gen_reg_rtx (GET_MODE (ret));
4120 if (GET_MODE (target) != GET_MODE (ret))
4121 ret = gen_lowpart (GET_MODE (target), ret);
4123 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4124 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4125 gcc_assert (ret);
4127 return target;
4132 return expand_movstr (dst, src, target, /*endp=*/2);
4136 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4137 arguments while being careful to avoid duplicate warnings (which could
4138 be issued if the expander were to expand the call, resulting in it
4139 being emitted in expand_call ()). */
4141 static rtx
4142 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4144 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4146 /* The call has been successfully expanded. Check for nonstring
4147 arguments and issue warnings as appropriate. */
4148 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4149 return ret;
4152 return NULL_RTX;
4155 /* Check a call EXP to the stpncpy built-in for validity.
4156 Return NULL_RTX on both success and failure. */
4158 static rtx
4159 expand_builtin_stpncpy (tree exp, rtx)
4161 if (!validate_arglist (exp,
4162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4163 || !warn_stringop_overflow)
4164 return NULL_RTX;
4166 /* The source and destination of the call. */
4167 tree dest = CALL_EXPR_ARG (exp, 0);
4168 tree src = CALL_EXPR_ARG (exp, 1);
4170 /* The exact number of bytes to write (not the maximum). */
4171 tree len = CALL_EXPR_ARG (exp, 2);
4173 /* The size of the destination object. */
4174 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4176 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4178 return NULL_RTX;
4181 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4182 bytes from constant string DATA + OFFSET and return it as a target
4183 constant. */
4185 static rtx
4186 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4187 scalar_int_mode mode)
4189 const char *str = (const char *) data;
4191 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4192 return const0_rtx;
4194 return c_readstr (str + offset, mode);
4197 /* Helper to check the sizes of sequences and the destination of calls
4198 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4199 success (no overflow or invalid sizes), false otherwise. */
4201 static bool
4202 check_strncat_sizes (tree exp, tree objsize)
4204 tree dest = CALL_EXPR_ARG (exp, 0);
4205 tree src = CALL_EXPR_ARG (exp, 1);
4206 tree maxread = CALL_EXPR_ARG (exp, 2);
4208 /* Try to determine the range of lengths that the source expression
4209 refers to. */
4210 tree lenrange[2];
4211 get_range_strlen (src, lenrange);
4213 /* Try to verify that the destination is big enough for the shortest
4214 string. */
4216 if (!objsize && warn_stringop_overflow)
4218 /* If it hasn't been provided by __strncat_chk, try to determine
4219 the size of the destination object into which the source is
4220 being copied. */
4221 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4224 /* Add one for the terminating nul. */
4225 tree srclen = (lenrange[0]
4226 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4227 size_one_node)
4228 : NULL_TREE);
4230 /* The strncat function copies at most MAXREAD bytes and always appends
4231 the terminating nul so the specified upper bound should never be equal
4232 to (or greater than) the size of the destination. */
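/* For example, the common anti-pattern

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed below (an illustrative case): even with an empty D and
   a long S, the copy writes sizeof d characters plus the terminating
   nul, one byte past the end of D. */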
4233 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4234 && tree_int_cst_equal (objsize, maxread))
4236 location_t loc = tree_nonartificial_location (exp);
4237 loc = expansion_point_location_if_in_system_header (loc);
4239 warning_at (loc, OPT_Wstringop_overflow_,
4240 "%K%qD specified bound %E equals destination size",
4241 exp, get_callee_fndecl (exp), maxread);
4243 return false;
4246 if (!srclen
4247 || (maxread && tree_fits_uhwi_p (maxread)
4248 && tree_fits_uhwi_p (srclen)
4249 && tree_int_cst_lt (maxread, srclen)))
4250 srclen = maxread;
4252 /* The number of bytes to write is LEN but check_access will also
4253 check SRCLEN if LEN's value isn't known. */
4254 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4255 objsize);
4258 /* Similar to expand_builtin_strcat, do some very basic size validation
4259 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4260 the built-in expand to a call to the library function. */
4262 static rtx
4263 expand_builtin_strncat (tree exp, rtx)
4265 if (!validate_arglist (exp,
4266 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4267 || !warn_stringop_overflow)
4268 return NULL_RTX;
4270 tree dest = CALL_EXPR_ARG (exp, 0);
4271 tree src = CALL_EXPR_ARG (exp, 1);
4272 /* The upper bound on the number of bytes to write. */
4273 tree maxread = CALL_EXPR_ARG (exp, 2);
4274 /* The length of the source sequence. */
4275 tree slen = c_strlen (src, 1);
4277 /* Try to determine the range of lengths that the source expression
4278 refers to. */
4279 tree lenrange[2];
4280 if (slen)
4281 lenrange[0] = lenrange[1] = slen;
4282 else
4283 get_range_strlen (src, lenrange);
4285 /* Try to verify that the destination is big enough for the shortest
4286 string. First try to determine the size of the destination object
4287 into which the source is being copied. */
4288 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4290 /* Add one for the terminating nul. */
4291 tree srclen = (lenrange[0]
4292 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4293 size_one_node)
4294 : NULL_TREE);
4296 /* The strncat function copies at most MAXREAD bytes and always appends
4297 the terminating nul so the specified upper bound should never be equal
4298 to (or greater than) the size of the destination. */
4299 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4300 && tree_int_cst_equal (destsize, maxread))
4302 location_t loc = tree_nonartificial_location (exp);
4303 loc = expansion_point_location_if_in_system_header (loc);
4305 warning_at (loc, OPT_Wstringop_overflow_,
4306 "%K%qD specified bound %E equals destination size",
4307 exp, get_callee_fndecl (exp), maxread);
4309 return NULL_RTX;
4312 if (!srclen
4313 || (maxread && tree_fits_uhwi_p (maxread)
4314 && tree_fits_uhwi_p (srclen)
4315 && tree_int_cst_lt (maxread, srclen)))
4316 srclen = maxread;
4318 /* The number of bytes to write is SRCLEN. */
4319 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4321 return NULL_RTX;
4324 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4325 NULL_RTX if we failed; the caller should emit a normal call. */
4327 static rtx
4328 expand_builtin_strncpy (tree exp, rtx target)
4330 location_t loc = EXPR_LOCATION (exp);
4332 if (validate_arglist (exp,
4333 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4335 tree dest = CALL_EXPR_ARG (exp, 0);
4336 tree src = CALL_EXPR_ARG (exp, 1);
4337 /* The number of bytes to write (not the maximum). */
4338 tree len = CALL_EXPR_ARG (exp, 2);
4339 /* The length of the source sequence. */
4340 tree slen = c_strlen (src, 1);
4342 if (warn_stringop_overflow)
4344 tree destsize = compute_objsize (dest,
4345 warn_stringop_overflow - 1);
4347 /* The number of bytes to write is LEN but check_access will also
4348 check SLEN if LEN's value isn't known. */
4349 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4350 destsize);
4353 /* We must be passed constant LEN and SRC parameters. */
4354 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4355 return NULL_RTX;
4357 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4359 /* We're required to pad with trailing zeros if the requested
4360 len is greater than strlen(s2)+1. In that case try to
4361 use store_by_pieces; if it fails, punt. */
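/* E.g., strncpy (d, "ab", 5) stores 'a', 'b' and three trailing nul
   bytes; an illustrative case where LEN exceeds strlen (SRC) + 1 and
   the store-by-pieces path below must supply the padding. */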
4362 if (tree_int_cst_lt (slen, len))
4364 unsigned int dest_align = get_pointer_alignment (dest);
4365 const char *p = c_getstr (src);
4366 rtx dest_mem;
4368 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4369 || !can_store_by_pieces (tree_to_uhwi (len),
4370 builtin_strncpy_read_str,
4371 CONST_CAST (char *, p),
4372 dest_align, false))
4373 return NULL_RTX;
4375 dest_mem = get_memory_rtx (dest, len);
4376 store_by_pieces (dest_mem, tree_to_uhwi (len),
4377 builtin_strncpy_read_str,
4378 CONST_CAST (char *, p), dest_align, false, 0);
4379 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4380 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4381 return dest_mem;
4384 return NULL_RTX;
4387 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4388 bytes from constant string DATA + OFFSET and return it as a target
4389 constant. */
4391 static rtx
4392 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4393 scalar_int_mode mode)
4395 const char *c = (const char *) data;
4396 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4398 memset (p, *c, GET_MODE_SIZE (mode));
4400 return c_readstr (p, mode);
4403 /* Callback routine for store_by_pieces. Return the RTL of a register
4404 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4405 char value given in the RTL register data. For example, if mode is
4406 4 bytes wide, return the RTL for 0x01010101*data. */
4408 static rtx
4409 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4410 scalar_int_mode mode)
4412 rtx target, coeff;
4413 size_t size;
4414 char *p;
4416 size = GET_MODE_SIZE (mode);
4417 if (size == 1)
4418 return (rtx) data;
4420 p = XALLOCAVEC (char, size);
4421 memset (p, 1, size);
4422 coeff = c_readstr (p, mode);
4424 target = convert_to_mode (mode, (rtx) data, 1);
4425 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4426 return force_reg (mode, target);
4429 /* Expand expression EXP, which is a call to the memset builtin. Return
4430 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4431 try to get the result in TARGET, if convenient (and in mode MODE if that's
4432 convenient). */
4434 static rtx
4435 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4437 if (!validate_arglist (exp,
4438 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4439 return NULL_RTX;
4441 tree dest = CALL_EXPR_ARG (exp, 0);
4442 tree val = CALL_EXPR_ARG (exp, 1);
4443 tree len = CALL_EXPR_ARG (exp, 2);
4445 check_memop_access (exp, dest, NULL_TREE, len);
4447 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4450 /* Helper function to do the actual work for expand_builtin_memset. The
4451 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4452 so that this can also be called without constructing an actual CALL_EXPR.
4453 The other arguments and return value are the same as for
4454 expand_builtin_memset. */
4456 static rtx
4457 expand_builtin_memset_args (tree dest, tree val, tree len,
4458 rtx target, machine_mode mode, tree orig_exp)
4460 tree fndecl, fn;
4461 enum built_in_function fcode;
4462 machine_mode val_mode;
4463 char c;
4464 unsigned int dest_align;
4465 rtx dest_mem, dest_addr, len_rtx;
4466 HOST_WIDE_INT expected_size = -1;
4467 unsigned int expected_align = 0;
4468 unsigned HOST_WIDE_INT min_size;
4469 unsigned HOST_WIDE_INT max_size;
4470 unsigned HOST_WIDE_INT probable_max_size;
4472 dest_align = get_pointer_alignment (dest);
4474 /* If DEST is not a pointer type, don't do this operation in-line. */
4475 if (dest_align == 0)
4476 return NULL_RTX;
4478 if (currently_expanding_gimple_stmt)
4479 stringop_block_profile (currently_expanding_gimple_stmt,
4480 &expected_align, &expected_size);
4482 if (expected_align < dest_align)
4483 expected_align = dest_align;
4485 /* If the LEN parameter is zero, return DEST. */
4486 if (integer_zerop (len))
4488 /* Evaluate and ignore VAL in case it has side-effects. */
4489 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4490 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4493 /* Stabilize the arguments in case we fail. */
4494 dest = builtin_save_expr (dest);
4495 val = builtin_save_expr (val);
4496 len = builtin_save_expr (len);
4498 len_rtx = expand_normal (len);
4499 determine_block_size (len, len_rtx, &min_size, &max_size,
4500 &probable_max_size);
4501 dest_mem = get_memory_rtx (dest, len);
4502 val_mode = TYPE_MODE (unsigned_char_type_node);
4504 if (TREE_CODE (val) != INTEGER_CST)
4506 rtx val_rtx;
4508 val_rtx = expand_normal (val);
4509 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4511 /* Assume that we can memset by pieces if we can store
4512 the coefficients by pieces (in the required modes).
4513 We can't pass builtin_memset_gen_str as that emits RTL. */
4514 c = 1;
4515 if (tree_fits_uhwi_p (len)
4516 && can_store_by_pieces (tree_to_uhwi (len),
4517 builtin_memset_read_str, &c, dest_align,
4518 true))
4520 val_rtx = force_reg (val_mode, val_rtx);
4521 store_by_pieces (dest_mem, tree_to_uhwi (len),
4522 builtin_memset_gen_str, val_rtx, dest_align,
4523 true, 0);
4525 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4526 dest_align, expected_align,
4527 expected_size, min_size, max_size,
4528 probable_max_size))
4529 goto do_libcall;
4531 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4532 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4533 return dest_mem;
4536 if (target_char_cast (val, &c))
4537 goto do_libcall;
4539 if (c)
4541 if (tree_fits_uhwi_p (len)
4542 && can_store_by_pieces (tree_to_uhwi (len),
4543 builtin_memset_read_str, &c, dest_align,
4544 true))
4545 store_by_pieces (dest_mem, tree_to_uhwi (len),
4546 builtin_memset_read_str, &c, dest_align, true, 0);
4547 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4548 gen_int_mode (c, val_mode),
4549 dest_align, expected_align,
4550 expected_size, min_size, max_size,
4551 probable_max_size))
4552 goto do_libcall;
4554 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4555 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4556 return dest_mem;
4559 set_mem_align (dest_mem, dest_align);
4560 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4561 CALL_EXPR_TAILCALL (orig_exp)
4562 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4563 expected_align, expected_size,
4564 min_size, max_size,
4565 probable_max_size);
4567 if (dest_addr == 0)
4569 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4570 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4573 return dest_addr;
4575 do_libcall:
4576 fndecl = get_callee_fndecl (orig_exp);
4577 fcode = DECL_FUNCTION_CODE (fndecl);
4578 if (fcode == BUILT_IN_MEMSET)
4579 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4580 dest, val, len);
4581 else if (fcode == BUILT_IN_BZERO)
4582 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4583 dest, len);
4584 else
4585 gcc_unreachable ();
4586 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4587 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4588 return expand_call (fn, target, target == const0_rtx);
4591 /* Expand expression EXP, which is a call to the bzero builtin. Return
4592 NULL_RTX if we failed; the caller should emit a normal call. */
4594 static rtx
4595 expand_builtin_bzero (tree exp)
4597 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4598 return NULL_RTX;
4600 tree dest = CALL_EXPR_ARG (exp, 0);
4601 tree size = CALL_EXPR_ARG (exp, 1);
4603 check_memop_access (exp, dest, NULL_TREE, size);
4605 /* New argument list transforming bzero(ptr x, int y) to
4606 memset(ptr x, int 0, size_t y). This is done this way
4607 so that if it isn't expanded inline, we fall back to
4608 calling bzero instead of memset. */
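/* E.g., an illustrative call bzero (p, n) expands as if it were
   memset (p, 0, (size_t) n), while ORIG_EXP is kept so that a
   library fallback still calls bzero. */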
4610 location_t loc = EXPR_LOCATION (exp);
4612 return expand_builtin_memset_args (dest, integer_zero_node,
4613 fold_convert_loc (loc,
4614 size_type_node, size),
4615 const0_rtx, VOIDmode, exp);
4618 /* Try to expand cmpstr operation ICODE with the given operands.
4619 Return the result rtx on success, otherwise return null. */
4621 static rtx
4622 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4623 HOST_WIDE_INT align)
4625 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4627 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4628 target = NULL_RTX;
4630 struct expand_operand ops[4];
4631 create_output_operand (&ops[0], target, insn_mode);
4632 create_fixed_operand (&ops[1], arg1_rtx);
4633 create_fixed_operand (&ops[2], arg2_rtx);
4634 create_integer_operand (&ops[3], align);
4635 if (maybe_expand_insn (icode, 4, ops))
4636 return ops[0].value;
4637 return NULL_RTX;
4640 /* Expand expression EXP, which is a call to the memcmp built-in function.
4641 Return NULL_RTX if we failed and the caller should emit a normal call,
4642 otherwise try to get the result in TARGET, if convenient.
4643 RESULT_EQ is true if we can relax the returned value to be either zero
4644 or nonzero, without caring about the sign. */
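/* E.g., in an illustrative use such as

     if (memcmp (a, b, n) == 0)
       ...

   only the zero/nonzero outcome matters, so the caller can pass
   RESULT_EQ as true and allow a cheaper expansion. */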
4646 static rtx
4647 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4649 if (!validate_arglist (exp,
4650 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4651 return NULL_RTX;
4653 tree arg1 = CALL_EXPR_ARG (exp, 0);
4654 tree arg2 = CALL_EXPR_ARG (exp, 1);
4655 tree len = CALL_EXPR_ARG (exp, 2);
4656 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4657 bool no_overflow = true;
4659 /* Diagnose calls where the specified length exceeds the size of either
4660 object. */
4661 tree size = compute_objsize (arg1, 0);
4662 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4663 len, /*maxread=*/NULL_TREE, size,
4664 /*objsize=*/NULL_TREE);
4665 if (no_overflow)
4667 size = compute_objsize (arg2, 0);
4668 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4669 len, /*maxread=*/NULL_TREE, size,
4670 /*objsize=*/NULL_TREE);
4673 /* If the specified length exceeds the size of either object,
4674 call the function. */
4675 if (!no_overflow)
4676 return NULL_RTX;
4678 /* Due to the performance benefit, always inline the calls first
4679 when result_eq is false. */
4680 rtx result = NULL_RTX;
4682 if (!result_eq && fcode != BUILT_IN_BCMP)
4684 result = inline_expand_builtin_string_cmp (exp, target);
4685 if (result)
4686 return result;
4689 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4690 location_t loc = EXPR_LOCATION (exp);
4692 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4693 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4695 /* If we don't have POINTER_TYPE, call the function. */
4696 if (arg1_align == 0 || arg2_align == 0)
4697 return NULL_RTX;
4699 rtx arg1_rtx = get_memory_rtx (arg1, len);
4700 rtx arg2_rtx = get_memory_rtx (arg2, len);
4701 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4703 /* Set MEM_SIZE as appropriate. */
4704 if (CONST_INT_P (len_rtx))
4706 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4707 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4710 by_pieces_constfn constfn = NULL;
4712 const char *src_str = c_getstr (arg2);
4713 if (result_eq && src_str == NULL)
4715 src_str = c_getstr (arg1);
4716 if (src_str != NULL)
4717 std::swap (arg1_rtx, arg2_rtx);
4720 /* If SRC is a string constant and block move would be done
4721 by pieces, we can avoid loading the string from memory
4722 and simply store the computed constants. */
4723 if (src_str
4724 && CONST_INT_P (len_rtx)
4725 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4726 constfn = builtin_memcpy_read_str;
4728 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4729 TREE_TYPE (len), target,
4730 result_eq, constfn,
4731 CONST_CAST (char *, src_str));
4733 if (result)
4735 /* Return the value in the proper mode for this function. */
4736 if (GET_MODE (result) == mode)
4737 return result;
4739 if (target != 0)
4741 convert_move (target, result, 0);
4742 return target;
4745 return convert_to_mode (mode, result, 0);
4748 return NULL_RTX;
4751 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4752 if we failed; the caller should emit a normal call, otherwise try to get
4753 the result in TARGET, if convenient. */
4755 static rtx
4756 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4758 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4759 return NULL_RTX;
4761 /* Due to the performance benefit, always inline the calls first. */
4762 rtx result = NULL_RTX;
4763 result = inline_expand_builtin_string_cmp (exp, target);
4764 if (result)
4765 return result;
4767 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4768 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4769 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4770 return NULL_RTX;
4772 tree arg1 = CALL_EXPR_ARG (exp, 0);
4773 tree arg2 = CALL_EXPR_ARG (exp, 1);
4775 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4776 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4778 /* If we don't have POINTER_TYPE, call the function. */
4779 if (arg1_align == 0 || arg2_align == 0)
4780 return NULL_RTX;
4782 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4783 arg1 = builtin_save_expr (arg1);
4784 arg2 = builtin_save_expr (arg2);
4786 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4787 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4789 /* Try to call cmpstrsi. */
4790 if (cmpstr_icode != CODE_FOR_nothing)
4791 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4792 MIN (arg1_align, arg2_align));
4794 /* Try to determine at least one length and call cmpstrnsi. */
4795 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4797 tree len;
4798 rtx arg3_rtx;
4800 tree len1 = c_strlen (arg1, 1);
4801 tree len2 = c_strlen (arg2, 1);
4803 if (len1)
4804 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4805 if (len2)
4806 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4808 /* If we don't have a constant length for the first, use the length
4809 of the second, if we know it. We don't require a constant for
4810 this case; some cost analysis could be done if both are available
4811 but neither is constant. For now, assume they're equally cheap,
4812 unless one has side effects. If both strings have constant lengths,
4813 use the smaller. */
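/* E.g., for an illustrative call strcmp (s, "abc"), LEN2 is the
   constant 4 (strlen plus one for the nul) and is used as the
   cmpstrnsi bound even when LEN1 is unknown. */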
4815 if (!len1)
4816 len = len2;
4817 else if (!len2)
4818 len = len1;
4819 else if (TREE_SIDE_EFFECTS (len1))
4820 len = len2;
4821 else if (TREE_SIDE_EFFECTS (len2))
4822 len = len1;
4823 else if (TREE_CODE (len1) != INTEGER_CST)
4824 len = len2;
4825 else if (TREE_CODE (len2) != INTEGER_CST)
4826 len = len1;
4827 else if (tree_int_cst_lt (len1, len2))
4828 len = len1;
4829 else
4830 len = len2;
4832 /* If both arguments have side effects, we cannot optimize. */
4833 if (len && !TREE_SIDE_EFFECTS (len))
4835 arg3_rtx = expand_normal (len);
4836 result = expand_cmpstrn_or_cmpmem
4837 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4838 arg3_rtx, MIN (arg1_align, arg2_align));
4842 tree fndecl = get_callee_fndecl (exp);
4843 if (result)
4845 /* Check to see if the argument was declared attribute nonstring
4846 and if so, issue a warning since at this point it's not known
4847 to be nul-terminated. */
4848 maybe_warn_nonstring_arg (fndecl, exp);
4850 /* Return the value in the proper mode for this function. */
4851 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4852 if (GET_MODE (result) == mode)
4853 return result;
4854 if (target == 0)
4855 return convert_to_mode (mode, result, 0);
4856 convert_move (target, result, 0);
4857 return target;
4860 /* Expand the library call ourselves using a stabilized argument
4861 list to avoid re-evaluating the function's arguments twice. */
4862 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4863 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4864 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4865 return expand_call (fn, target, target == const0_rtx);
4868 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4869 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4870 the result in TARGET, if convenient. */
4872 static rtx
4873 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4874 ATTRIBUTE_UNUSED machine_mode mode)
4876 if (!validate_arglist (exp,
4877 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4878 return NULL_RTX;
4880 /* Due to the performance benefit, always inline the calls first. */
4881 rtx result = NULL_RTX;
4882 result = inline_expand_builtin_string_cmp (exp, target);
4883 if (result)
4884 return result;
4886 /* If c_strlen can determine an expression for one of the string
4887 lengths, and it doesn't have side effects, then emit cmpstrnsi
4888 using length MIN(strlen(string)+1, arg3). */
4889 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4890 if (cmpstrn_icode == CODE_FOR_nothing)
4891 return NULL_RTX;
4893 tree len;
4895 tree arg1 = CALL_EXPR_ARG (exp, 0);
4896 tree arg2 = CALL_EXPR_ARG (exp, 1);
4897 tree arg3 = CALL_EXPR_ARG (exp, 2);
4899 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4900 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4902 tree len1 = c_strlen (arg1, 1);
4903 tree len2 = c_strlen (arg2, 1);
4905 location_t loc = EXPR_LOCATION (exp);
4907 if (len1)
4908 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4909 if (len2)
4910 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4912 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4914 /* If we don't have a constant length for the first, use the length
4915 of the second, if we know it. If neither string is constant length,
4916 use the given length argument. We don't require a constant for
4917 this case; some cost analysis could be done if both are available
4918 but neither is constant. For now, assume they're equally cheap,
4919 unless one has side effects. If both strings have constant lengths,
4920 use the smaller. */
4922 if (!len1 && !len2)
4923 len = len3;
4924 else if (!len1)
4925 len = len2;
4926 else if (!len2)
4927 len = len1;
4928 else if (TREE_SIDE_EFFECTS (len1))
4929 len = len2;
4930 else if (TREE_SIDE_EFFECTS (len2))
4931 len = len1;
4932 else if (TREE_CODE (len1) != INTEGER_CST)
4933 len = len2;
4934 else if (TREE_CODE (len2) != INTEGER_CST)
4935 len = len1;
4936 else if (tree_int_cst_lt (len1, len2))
4937 len = len1;
4938 else
4939 len = len2;
4941 /* If we are not using the given length, we must incorporate it here.
4942 The actual new length parameter will be MIN(len,arg3) in this case. */
4943 if (len != len3)
4945 len = fold_convert_loc (loc, sizetype, len);
4946 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4948 rtx arg1_rtx = get_memory_rtx (arg1, len);
4949 rtx arg2_rtx = get_memory_rtx (arg2, len);
4950 rtx arg3_rtx = expand_normal (len);
4951 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4952 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4953 MIN (arg1_align, arg2_align));
4955 tree fndecl = get_callee_fndecl (exp);
4956 if (result)
4958 /* Check to see if the argument was declared attribute nonstring
4959 and if so, issue a warning since at this point it's not known
4960 to be nul-terminated. */
4961 maybe_warn_nonstring_arg (fndecl, exp);
4963 /* Return the value in the proper mode for this function. */
4964 mode = TYPE_MODE (TREE_TYPE (exp));
4965 if (GET_MODE (result) == mode)
4966 return result;
4967 if (target == 0)
4968 return convert_to_mode (mode, result, 0);
4969 convert_move (target, result, 0);
4970 return target;
4973 /* Expand the library call ourselves using a stabilized argument
4974 list to avoid re-evaluating the function's arguments twice. */
4975 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4976 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4977 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4978 return expand_call (fn, target, target == const0_rtx);
4981 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4982 if that's convenient. */
4984 rtx
4985 expand_builtin_saveregs (void)
4987 rtx val;
4988 rtx_insn *seq;
4990 /* Don't do __builtin_saveregs more than once in a function.
4991 Save the result of the first call and reuse it. */
4992 if (saveregs_value != 0)
4993 return saveregs_value;
4995 /* When this function is called, it means that registers must be
4996 saved on entry to this function. So we migrate the call to the
4997 first insn of this function. */
4999 start_sequence ();
5001 /* Do whatever the machine needs done in this case. */
5002 val = targetm.calls.expand_builtin_saveregs ();
5004 seq = get_insns ();
5005 end_sequence ();
5007 saveregs_value = val;
5009 /* Put the insns after the NOTE that starts the function. If this
5010 is inside a start_sequence, make the outer-level insn chain current, so
5011 the code is placed at the start of the function. */
5012 push_topmost_sequence ();
5013 emit_insn_after (seq, entry_of_function ());
5014 pop_topmost_sequence ();
5016 return val;
5019 /* Expand a call to __builtin_next_arg. */
5021 static rtx
5022 expand_builtin_next_arg (void)
5024 /* Checking arguments is already done in fold_builtin_next_arg,
5025 which must be called before this function. */
5026 return expand_binop (ptr_mode, add_optab,
5027 crtl->args.internal_arg_pointer,
5028 crtl->args.arg_offset_rtx,
5029 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5032 /* Make it easier for the backends by protecting the valist argument
5033 from multiple evaluations. */
5035 static tree
5036 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5038 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5040 /* The current way of determining the type of valist is completely
5041 bogus. We should have the information on the va builtin instead. */
5042 if (!vatype)
5043 vatype = targetm.fn_abi_va_list (cfun->decl);
5045 if (TREE_CODE (vatype) == ARRAY_TYPE)
5047 if (TREE_SIDE_EFFECTS (valist))
5048 valist = save_expr (valist);
5050 /* For this case, the backends will be expecting a pointer to
5051 vatype, but it's possible we've actually been given an array
5052 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5053 So fix it. */
5054 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5056 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5057 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5060 else
5062 tree pt = build_pointer_type (vatype);
5064 if (! needs_lvalue)
5066 if (! TREE_SIDE_EFFECTS (valist))
5067 return valist;
5069 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5070 TREE_SIDE_EFFECTS (valist) = 1;
5073 if (TREE_SIDE_EFFECTS (valist))
5074 valist = save_expr (valist);
5075 valist = fold_build2_loc (loc, MEM_REF,
5076 vatype, valist, build_int_cst (pt, 0));
5079 return valist;
5082 /* The "standard" definition of va_list is void*. */
5084 tree
5085 std_build_builtin_va_list (void)
5087 return ptr_type_node;
5090 /* The "standard" abi va_list is va_list_type_node. */
5092 tree
5093 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5095 return va_list_type_node;
5098 /* The "standard" type of va_list is va_list_type_node. */
5100 tree
5101 std_canonical_va_list_type (tree type)
5103 tree wtype, htype;
5105 wtype = va_list_type_node;
5106 htype = type;
5108 if (TREE_CODE (wtype) == ARRAY_TYPE)
5110 /* If va_list is an array type, the argument may have decayed
5111 to a pointer type, e.g. by being passed to another function.
5112 In that case, unwrap both types so that we can compare the
5113 underlying records. */
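/* E.g., on a target where va_list is an array type along the lines
   of

     typedef struct __va_list_tag va_list[1];

   a va_list function parameter decays to struct __va_list_tag *, so
   the element types are what must be compared (an illustrative
   example, not a specific target's definition). */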
5114 if (TREE_CODE (htype) == ARRAY_TYPE
5115 || POINTER_TYPE_P (htype))
5117 wtype = TREE_TYPE (wtype);
5118 htype = TREE_TYPE (htype);
5121 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5122 return va_list_type_node;
5124 return NULL_TREE;
5127 /* The "standard" implementation of va_start: just assign `nextarg' to
5128 the variable. */
5130 void
5131 std_expand_builtin_va_start (tree valist, rtx nextarg)
5133 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5134 convert_move (va_r, nextarg, 0);
5137 /* Expand EXP, a call to __builtin_va_start. */
5139 static rtx
5140 expand_builtin_va_start (tree exp)
5142 rtx nextarg;
5143 tree valist;
5144 location_t loc = EXPR_LOCATION (exp);
5146 if (call_expr_nargs (exp) < 2)
5148 error_at (loc, "too few arguments to function %<va_start%>");
5149 return const0_rtx;
5152 if (fold_builtin_next_arg (exp, true))
5153 return const0_rtx;
5155 nextarg = expand_builtin_next_arg ();
5156 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5158 if (targetm.expand_builtin_va_start)
5159 targetm.expand_builtin_va_start (valist, nextarg);
5160 else
5161 std_expand_builtin_va_start (valist, nextarg);
5163 return const0_rtx;
5166 /* Expand EXP, a call to __builtin_va_end. */
5168 static rtx
5169 expand_builtin_va_end (tree exp)
5171 tree valist = CALL_EXPR_ARG (exp, 0);
5173 /* Evaluate for side effects, if needed. I hate macros that don't
5174 do that. */
5175 if (TREE_SIDE_EFFECTS (valist))
5176 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5178 return const0_rtx;
5181 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5182 builtin rather than just as an assignment in stdarg.h because of the
5183 nastiness of array-type va_list types. */
5185 static rtx
5186 expand_builtin_va_copy (tree exp)
5188 tree dst, src, t;
5189 location_t loc = EXPR_LOCATION (exp);
5191 dst = CALL_EXPR_ARG (exp, 0);
5192 src = CALL_EXPR_ARG (exp, 1);
5194 dst = stabilize_va_list_loc (loc, dst, 1);
5195 src = stabilize_va_list_loc (loc, src, 0);
5197 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5199 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5201 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5202 TREE_SIDE_EFFECTS (t) = 1;
5203 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5205 else
5207 rtx dstb, srcb, size;
5209 /* Evaluate to pointers. */
5210 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5211 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5212 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5213 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5215 dstb = convert_memory_address (Pmode, dstb);
5216 srcb = convert_memory_address (Pmode, srcb);
5218 /* "Dereference" to BLKmode memories. */
5219 dstb = gen_rtx_MEM (BLKmode, dstb);
5220 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5221 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5222 srcb = gen_rtx_MEM (BLKmode, srcb);
5223 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5224 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5226 /* Copy. */
5227 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5230 return const0_rtx;
5233 /* Expand a call to one of the builtin functions __builtin_frame_address or
5234 __builtin_return_address. */
5236 static rtx
5237 expand_builtin_frame_address (tree fndecl, tree exp)
5239 /* The argument must be a nonnegative integer constant.
5240 It counts the number of frames to scan up the stack.
5241 The value is either the frame pointer value or the return
5242 address saved in that frame. */
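/* E.g., __builtin_return_address (0) yields the return address of
   the current function and __builtin_frame_address (0) its frame
   address; nonzero counts walk up the stack and are diagnosed below
   as unsafe. */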
5243 if (call_expr_nargs (exp) == 0)
5244 /* Warning about missing arg was already issued. */
5245 return const0_rtx;
5246 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5248 error ("invalid argument to %qD", fndecl);
5249 return const0_rtx;
5251 else
5253 /* Number of frames to scan up the stack. */
5254 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5256 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5258 /* Some ports cannot access arbitrary stack frames. */
5259 if (tem == NULL)
5261 warning (0, "unsupported argument to %qD", fndecl);
5262 return const0_rtx;
5265 if (count)
5267 /* Warn since no effort is made to ensure that any frame
5268 beyond the current one exists or can be safely reached. */
5269 warning (OPT_Wframe_address, "calling %qD with "
5270 "a nonzero argument is unsafe", fndecl);
5273 /* For __builtin_frame_address, return what we've got. */
5274 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5275 return tem;
5277 if (!REG_P (tem)
5278 && ! CONSTANT_P (tem))
5279 tem = copy_addr_to_reg (tem);
5280 return tem;
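/* Editorial sketch (not GCC source): a zero count is always safe; any
   nonzero count triggers the -Wframe-address warning above, because outer
   frames may not be reachable on every target.

     #include <stdio.h>

     void who_called_me (void)
     {
       void *ra = __builtin_return_address (0);   // our return address
       void *fp = __builtin_frame_address (0);    // our frame pointer
       printf ("return addr %p, frame %p\n", ra, fp);
     }
*/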
5284 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5285 failed and the caller should emit a normal call. */
5287 static rtx
5288 expand_builtin_alloca (tree exp)
5290 rtx op0;
5291 rtx result;
5292 unsigned int align;
5293 tree fndecl = get_callee_fndecl (exp);
5294 HOST_WIDE_INT max_size;
5295 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5296 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5297 bool valid_arglist
5298 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5299 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5300 VOID_TYPE)
5301 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5302 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5303 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5305 if (!valid_arglist)
5306 return NULL_RTX;
5308 if ((alloca_for_var
5309 && warn_vla_limit >= HOST_WIDE_INT_MAX
5310 && warn_alloc_size_limit < warn_vla_limit)
5311 || (!alloca_for_var
5312 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5313 && warn_alloc_size_limit < warn_alloca_limit
5316 /* -Walloca-larger-than and -Wvla-larger-than settings of
5317 less than HOST_WIDE_INT_MAX override the more general
5318 -Walloc-size-larger-than so unless either of the former
5319 options is smaller than the last one (which would imply
5320 that the call was already checked), check the alloca
5321 arguments for overflow. */
5322 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5323 int idx[] = { 0, -1 };
5324 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5327 /* Compute the argument. */
5328 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5330 /* Compute the alignment. */
5331 align = (fcode == BUILT_IN_ALLOCA
5332 ? BIGGEST_ALIGNMENT
5333 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5335 /* Compute the maximum size. */
5336 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5337 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5338 : -1);
5340 /* Allocate the desired space. If the allocation stems from the declaration
5341 of a variable-sized object, it cannot accumulate. */
5342 result
5343 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5344 result = convert_memory_address (ptr_mode, result);
5346 return result;
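/* Editorial sketch (not GCC source): the alloca flavors handled above as
   they appear at user level; note the alignment argument is in bits.

     #include <string.h>

     void consume (void *p);

     void demo (unsigned n)
     {
       void *a = __builtin_alloca (n);                  // BIGGEST_ALIGNMENT
       void *b = __builtin_alloca_with_align (n, 256);  // 256-bit aligned
       memset (a, 0, n);
       memset (b, 0, n);
       consume (a);
       consume (b);
     }
*/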
5349 /* Emit a call to __asan_allocas_unpoison for EXP. Add
5350 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5351 STACK_DYNAMIC_OFFSET value, to the call's second argument. See the
5352 motivation for this in the comment for handle_builtin_stack_restore. */
5354 static rtx
5355 expand_asan_emit_allocas_unpoison (tree exp)
5357 tree arg0 = CALL_EXPR_ARG (exp, 0);
5358 tree arg1 = CALL_EXPR_ARG (exp, 1);
5359 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5360 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5361 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5362 stack_pointer_rtx, NULL_RTX, 0,
5363 OPTAB_LIB_WIDEN);
5364 off = convert_modes (ptr_mode, Pmode, off, 0);
5365 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5366 OPTAB_LIB_WIDEN);
5367 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5368 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5369 top, ptr_mode, bot, ptr_mode);
5370 return ret;
5373 /* Expand a call to bswap builtin in EXP.
5374 Return NULL_RTX if a normal call should be emitted rather than expanding the
5375 function in-line. If convenient, the result should be placed in TARGET.
5376 SUBTARGET may be used as the target for computing one of EXP's operands. */
5378 static rtx
5379 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5380 rtx subtarget)
5382 tree arg;
5383 rtx op0;
5385 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5386 return NULL_RTX;
5388 arg = CALL_EXPR_ARG (exp, 0);
5389 op0 = expand_expr (arg,
5390 subtarget && GET_MODE (subtarget) == target_mode
5391 ? subtarget : NULL_RTX,
5392 target_mode, EXPAND_NORMAL);
5393 if (GET_MODE (op0) != target_mode)
5394 op0 = convert_to_mode (target_mode, op0, 1);
5396 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5398 gcc_assert (target);
5400 return convert_to_mode (target_mode, target, 1);
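/* Editorial sketch (not GCC source): what this expander implements at user
   level, normally via a single bswap instruction.

     #include <stdint.h>

     uint32_t to_big_endian32 (uint32_t host)
     {
       // On a little-endian target, 0x11223344 becomes 0x44332211.
       return __builtin_bswap32 (host);
     }
*/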
5403 /* Expand a call to a unary builtin in EXP.
5404 Return NULL_RTX if a normal call should be emitted rather than expanding the
5405 function in-line. If convenient, the result should be placed in TARGET.
5406 SUBTARGET may be used as the target for computing one of EXP's operands. */
5408 static rtx
5409 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5410 rtx subtarget, optab op_optab)
5412 rtx op0;
5414 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5415 return NULL_RTX;
5417 /* Compute the argument. */
5418 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5419 (subtarget
5420 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5421 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5422 VOIDmode, EXPAND_NORMAL);
5423 /* Compute op, into TARGET if possible.
5424 Set TARGET to wherever the result comes back. */
5425 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5426 op_optab, op0, target, op_optab != clrsb_optab);
5427 gcc_assert (target);
5429 return convert_to_mode (target_mode, target, 0);
5432 /* Expand a call to __builtin_expect. We just return our argument
5433 as the builtin_expect semantics should already have been applied
5434 by the tree branch prediction pass. */
5436 static rtx
5437 expand_builtin_expect (tree exp, rtx target)
5439 tree arg;
5441 if (call_expr_nargs (exp) < 2)
5442 return const0_rtx;
5443 arg = CALL_EXPR_ARG (exp, 0);
5445 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5446 /* When guessing was done, the hints should already be stripped away. */
5447 gcc_assert (!flag_guess_branch_prob
5448 || optimize == 0 || seen_error ());
5449 return target;
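/* Editorial sketch (not GCC source): the hint was consumed by branch
   prediction at the tree level; by the time we get here only the first
   argument is left to expand.

     int deref (int *p)
     {
       if (__builtin_expect (p == 0, 0))   // "p is almost never null"
         return -1;                        // becomes the cold path
       return *p;
     }
*/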
5452 /* Expand a call to __builtin_expect_with_probability. We just return
5453 our argument as the builtin_expect semantics should already have been
5454 applied by the tree branch prediction pass. */
5456 static rtx
5457 expand_builtin_expect_with_probability (tree exp, rtx target)
5459 tree arg;
5461 if (call_expr_nargs (exp) < 3)
5462 return const0_rtx;
5463 arg = CALL_EXPR_ARG (exp, 0);
5465 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5466 /* When guessing was done, the hints should already be stripped away. */
5467 gcc_assert (!flag_guess_branch_prob
5468 || optimize == 0 || seen_error ());
5469 return target;
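/* Editorial sketch (not GCC source): the three-argument variant carries an
   explicit probability for the expected outcome.

     int mostly_positive (int x)
     {
       if (__builtin_expect_with_probability (x > 0, 1, 0.9))
         return x;    // laid out as the hot path (~90% likely)
       return -x;
     }
*/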
5473 /* Expand a call to __builtin_assume_aligned. We just return our first
5474 argument as the builtin_assume_aligned semantic should've been already
5475 executed by CCP. */
5477 static rtx
5478 expand_builtin_assume_aligned (tree exp, rtx target)
5480 if (call_expr_nargs (exp) < 2)
5481 return const0_rtx;
5482 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5483 EXPAND_NORMAL);
5484 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5485 && (call_expr_nargs (exp) < 3
5486 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5487 return target;
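/* Editorial sketch (not GCC source): the alignment promise was consumed by
   CCP long before this point; expansion just re-emits the first argument,
   and the assert checks the remaining arguments have no side effects.

     float sum4 (const float *p)
     {
       // Promise a 16-byte alignment so vector loads can be emitted.
       const float *q = __builtin_assume_aligned (p, 16);
       return q[0] + q[1] + q[2] + q[3];
     }
*/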
5490 void
5491 expand_builtin_trap (void)
5493 if (targetm.have_trap ())
5495 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5496 /* For trap insns, when not accumulating outgoing args, force a
5497 REG_ARGS_SIZE note to prevent crossjumping of calls with
5498 different arg sizes. */
5499 if (!ACCUMULATE_OUTGOING_ARGS)
5500 add_args_size_note (insn, stack_pointer_delta);
5502 else
5504 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5505 tree call_expr = build_call_expr (fn, 0);
5506 expand_call (call_expr, NULL_RTX, false);
5509 emit_barrier ();
5512 /* Expand a call to __builtin_unreachable. We do nothing except emit
5513 a barrier saying that control flow will not pass here.
5515 It is the responsibility of the program being compiled to ensure
5516 that control flow never reaches __builtin_unreachable. */
5517 static void
5518 expand_builtin_unreachable (void)
5520 emit_barrier ();
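/* Editorial sketch (not GCC source): unlike __builtin_trap, this builtin
   emits no code at all - it is purely an optimizer hint, and reaching it
   is undefined behavior.

     int classify (int x)
     {
       switch (x & 3)
         {
         case 0: return 10;
         case 1: return 11;
         case 2: return 12;
         case 3: return 13;
         }
       __builtin_unreachable ();   // all cases covered; silences no-return
     }
*/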
5523 /* Expand EXP, a call to fabs, fabsf or fabsl.
5524 Return NULL_RTX if a normal call should be emitted rather than expanding
5525 the function inline. If convenient, the result should be placed
5526 in TARGET. SUBTARGET may be used as the target for computing
5527 the operand. */
5529 static rtx
5530 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5532 machine_mode mode;
5533 tree arg;
5534 rtx op0;
5536 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5537 return NULL_RTX;
5539 arg = CALL_EXPR_ARG (exp, 0);
5540 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5541 mode = TYPE_MODE (TREE_TYPE (arg));
5542 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5543 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5546 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5547 Return NULL if a normal call should be emitted rather than expanding the
5548 function inline. If convenient, the result should be placed in TARGET.
5549 SUBTARGET may be used as the target for computing the operand. */
5551 static rtx
5552 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5554 rtx op0, op1;
5555 tree arg;
5557 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5558 return NULL_RTX;
5560 arg = CALL_EXPR_ARG (exp, 0);
5561 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5563 arg = CALL_EXPR_ARG (exp, 1);
5564 op1 = expand_normal (arg);
5566 return expand_copysign (op0, op1, target);
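/* Editorial sketch (not GCC source): fabs and copysign only manipulate the
   sign bit, which is why both can usually expand inline with no libcall.

     #include <math.h>
     #include <stdio.h>

     int main (void)
     {
       printf ("%g %g\n", fabs (-2.5), copysign (3.0, -1.0));
       return 0;   // prints: 2.5 -3
     }
*/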
5569 /* Expand a call to __builtin___clear_cache. */
5571 static rtx
5572 expand_builtin___clear_cache (tree exp)
5574 if (!targetm.code_for_clear_cache)
5576 #ifdef CLEAR_INSN_CACHE
5577 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5578 does something. Just do the default expansion to a call to
5579 __clear_cache(). */
5580 return NULL_RTX;
5581 #else
5582 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5583 does nothing. There is no need to call it. Do nothing. */
5584 return const0_rtx;
5585 #endif /* CLEAR_INSN_CACHE */
5588 /* We have a "clear_cache" insn, and it will handle everything. */
5589 tree begin, end;
5590 rtx begin_rtx, end_rtx;
5592 /* We must not expand to a library call. If we did, any
5593 fallback library function in libgcc that might contain a call to
5594 __builtin___clear_cache() would recurse infinitely. */
5595 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5597 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5598 return const0_rtx;
5601 if (targetm.have_clear_cache ())
5603 struct expand_operand ops[2];
5605 begin = CALL_EXPR_ARG (exp, 0);
5606 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5608 end = CALL_EXPR_ARG (exp, 1);
5609 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5611 create_address_operand (&ops[0], begin_rtx);
5612 create_address_operand (&ops[1], end_rtx);
5613 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5614 return const0_rtx;
5616 return const0_rtx;
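/* Editorial sketch (not GCC source): the classic JIT use of
   __builtin___clear_cache - flush the instruction cache over a buffer that
   was just filled with code, before jumping to it.  The code-emission
   helper here is a hypothetical placeholder.

     typedef int (*jit_fn) (void);

     extern unsigned char *emit_machine_code (unsigned *len);  // hypothetical

     int run_jitted (void)
     {
       unsigned len;
       unsigned char *buf = emit_machine_code (&len);
       __builtin___clear_cache ((char *) buf, (char *) buf + len);
       return ((jit_fn) buf) ();
     }
*/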
5619 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5621 static rtx
5622 round_trampoline_addr (rtx tramp)
5624 rtx temp, addend, mask;
5626 /* If we don't need too much alignment, we'll have been guaranteed
5627 proper alignment by get_trampoline_type. */
5628 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5629 return tramp;
5631 /* Round address up to desired boundary. */
5632 temp = gen_reg_rtx (Pmode);
5633 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5634 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5636 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5637 temp, 0, OPTAB_LIB_WIDEN);
5638 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5639 temp, 0, OPTAB_LIB_WIDEN);
5641 return tramp;
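/* Editorial sketch (not GCC source): the PLUS/AND pair above is the usual
   round-up-to-alignment idiom, shown here for a 16-byte boundary.

     #include <stdint.h>

     static uintptr_t round_up_16 (uintptr_t addr)
     {
       // addend = align - 1, mask = -align: 0x1001 rounds up to 0x1010.
       return (addr + 15) & (uintptr_t) -16;
     }
*/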
5644 static rtx
5645 expand_builtin_init_trampoline (tree exp, bool onstack)
5647 tree t_tramp, t_func, t_chain;
5648 rtx m_tramp, r_tramp, r_chain, tmp;
5650 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5651 POINTER_TYPE, VOID_TYPE))
5652 return NULL_RTX;
5654 t_tramp = CALL_EXPR_ARG (exp, 0);
5655 t_func = CALL_EXPR_ARG (exp, 1);
5656 t_chain = CALL_EXPR_ARG (exp, 2);
5658 r_tramp = expand_normal (t_tramp);
5659 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5660 MEM_NOTRAP_P (m_tramp) = 1;
5662 /* If ONSTACK, the TRAMP argument should be the address of a field
5663 within the local function's FRAME decl. Either way, let's see if
5664 we can fill in the MEM_ATTRs for this memory. */
5665 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5666 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5668 /* Creator of a heap trampoline is responsible for making sure the
5669 address is aligned to at least STACK_BOUNDARY. Normally malloc
5670 will ensure this anyhow. */
5671 tmp = round_trampoline_addr (r_tramp);
5672 if (tmp != r_tramp)
5674 m_tramp = change_address (m_tramp, BLKmode, tmp);
5675 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5676 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5679 /* The FUNC argument should be the address of the nested function.
5680 Extract the actual function decl to pass to the hook. */
5681 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5682 t_func = TREE_OPERAND (t_func, 0);
5683 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5685 r_chain = expand_normal (t_chain);
5687 /* Generate insns to initialize the trampoline. */
5688 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5690 if (onstack)
5692 trampolines_created = 1;
5694 if (targetm.calls.custom_function_descriptors != 0)
5695 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5696 "trampoline generated for nested function %qD", t_func);
5699 return const0_rtx;
5702 static rtx
5703 expand_builtin_adjust_trampoline (tree exp)
5705 rtx tramp;
5707 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5708 return NULL_RTX;
5710 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5711 tramp = round_trampoline_addr (tramp);
5712 if (targetm.calls.trampoline_adjust_address)
5713 tramp = targetm.calls.trampoline_adjust_address (tramp);
5715 return tramp;
5718 /* Expand a call to the builtin descriptor initialization routine.
5719 A descriptor is made up of a pair of pointers: one to the static
5720 chain and one to the code entry, in that order. */
5722 static rtx
5723 expand_builtin_init_descriptor (tree exp)
5725 tree t_descr, t_func, t_chain;
5726 rtx m_descr, r_descr, r_func, r_chain;
5728 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5729 VOID_TYPE))
5730 return NULL_RTX;
5732 t_descr = CALL_EXPR_ARG (exp, 0);
5733 t_func = CALL_EXPR_ARG (exp, 1);
5734 t_chain = CALL_EXPR_ARG (exp, 2);
5736 r_descr = expand_normal (t_descr);
5737 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5738 MEM_NOTRAP_P (m_descr) = 1;
5740 r_func = expand_normal (t_func);
5741 r_chain = expand_normal (t_chain);
5743 /* Generate insns to initialize the descriptor. */
5744 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5745 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5746 POINTER_SIZE / BITS_PER_UNIT), r_func);
5748 return const0_rtx;
5751 /* Expand a call to the builtin descriptor adjustment routine. */
5753 static rtx
5754 expand_builtin_adjust_descriptor (tree exp)
5756 rtx tramp;
5758 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5759 return NULL_RTX;
5761 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5763 /* Unalign the descriptor to allow runtime identification. */
5764 tramp = plus_constant (ptr_mode, tramp,
5765 targetm.calls.custom_function_descriptors);
5767 return force_operand (tramp, NULL_RTX);
5770 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5771 function. The function first checks whether the back end provides
5772 an insn to implement signbit for the respective mode. If not, it
5773 checks whether the floating point format of the value is such that
5774 the sign bit can be extracted. If that is not the case, error out.
5775 EXP is the expression that is a call to the builtin function; if
5776 convenient, the result should be placed in TARGET. */
5777 static rtx
5778 expand_builtin_signbit (tree exp, rtx target)
5780 const struct real_format *fmt;
5781 scalar_float_mode fmode;
5782 scalar_int_mode rmode, imode;
5783 tree arg;
5784 int word, bitpos;
5785 enum insn_code icode;
5786 rtx temp;
5787 location_t loc = EXPR_LOCATION (exp);
5789 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5790 return NULL_RTX;
5792 arg = CALL_EXPR_ARG (exp, 0);
5793 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5794 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5795 fmt = REAL_MODE_FORMAT (fmode);
5797 arg = builtin_save_expr (arg);
5799 /* Expand the argument yielding a RTX expression. */
5800 temp = expand_normal (arg);
5802 /* Check if the back end provides an insn that handles signbit for the
5803 argument's mode. */
5804 icode = optab_handler (signbit_optab, fmode);
5805 if (icode != CODE_FOR_nothing)
5807 rtx_insn *last = get_last_insn ();
5808 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5809 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5810 return target;
5811 delete_insns_since (last);
5814 /* For floating point formats without a sign bit, implement signbit
5815 as "ARG < 0.0". */
5816 bitpos = fmt->signbit_ro;
5817 if (bitpos < 0)
5819 /* But we can't do this if the format supports signed zero. */
5820 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5822 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5823 build_real (TREE_TYPE (arg), dconst0));
5824 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5827 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5829 imode = int_mode_for_mode (fmode).require ();
5830 temp = gen_lowpart (imode, temp);
5832 else
5834 imode = word_mode;
5835 /* Handle targets with different FP word orders. */
5836 if (FLOAT_WORDS_BIG_ENDIAN)
5837 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5838 else
5839 word = bitpos / BITS_PER_WORD;
5840 temp = operand_subword_force (temp, word, fmode);
5841 bitpos = bitpos % BITS_PER_WORD;
5844 /* Force the intermediate word_mode (or narrower) result into a
5845 register. This avoids attempting to create paradoxical SUBREGs
5846 of floating point modes below. */
5847 temp = force_reg (imode, temp);
5849 /* If the bitpos is within the "result mode" lowpart, the operation
5850 can be implemented with a single bitwise AND. Otherwise, we need
5851 a right shift and an AND. */
5853 if (bitpos < GET_MODE_BITSIZE (rmode))
5855 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5857 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5858 temp = gen_lowpart (rmode, temp);
5859 temp = expand_binop (rmode, and_optab, temp,
5860 immed_wide_int_const (mask, rmode),
5861 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5863 else
5865 /* Perform a logical right shift to place the signbit in the least
5866 significant bit, then truncate the result to the desired mode
5867 and mask just this bit. */
5868 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5869 temp = gen_lowpart (rmode, temp);
5870 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5871 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5874 return temp;
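/* Editorial sketch (not GCC source): the integer fallback path above,
   written out for IEEE binary64, where the sign occupies bit 63 and the
   extraction is exactly the right-shift-and-AND sequence emitted above.

     #include <stdint.h>
     #include <string.h>

     int my_signbit (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);   // reinterpret, like gen_lowpart
       return (int) (bits >> 63);         // shift the sign bit to bit 0
     }
     // my_signbit (-0.0) == 1, my_signbit (2.0) == 0
*/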
5877 /* Expand fork or exec calls. TARGET is the desired target of the
5878 call. EXP is the call. FN is the
5879 identifier of the actual function. IGNORE is nonzero if the
5880 value is to be ignored. */
5882 static rtx
5883 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5885 tree id, decl;
5886 tree call;
5888 /* If we are not profiling, just call the function. */
5889 if (!profile_arc_flag)
5890 return NULL_RTX;
5892 /* Otherwise call the wrapper. This should be equivalent for the rest of
5893 compiler, so the code does not diverge, and the wrapper may run the
5894 code necessary for keeping the profiling sane. */
5896 switch (DECL_FUNCTION_CODE (fn))
5898 case BUILT_IN_FORK:
5899 id = get_identifier ("__gcov_fork");
5900 break;
5902 case BUILT_IN_EXECL:
5903 id = get_identifier ("__gcov_execl");
5904 break;
5906 case BUILT_IN_EXECV:
5907 id = get_identifier ("__gcov_execv");
5908 break;
5910 case BUILT_IN_EXECLP:
5911 id = get_identifier ("__gcov_execlp");
5912 break;
5914 case BUILT_IN_EXECLE:
5915 id = get_identifier ("__gcov_execle");
5916 break;
5918 case BUILT_IN_EXECVP:
5919 id = get_identifier ("__gcov_execvp");
5920 break;
5922 case BUILT_IN_EXECVE:
5923 id = get_identifier ("__gcov_execve");
5924 break;
5926 default:
5927 gcc_unreachable ();
5930 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5931 FUNCTION_DECL, id, TREE_TYPE (fn));
5932 DECL_EXTERNAL (decl) = 1;
5933 TREE_PUBLIC (decl) = 1;
5934 DECL_ARTIFICIAL (decl) = 1;
5935 TREE_NOTHROW (decl) = 1;
5936 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5937 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5938 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5939 return expand_call (call, target, ignore);
5944 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5945 the pointer in these functions is void*, the tree optimizers may remove
5946 casts. The mode computed in expand_builtin isn't reliable either, due
5947 to __sync_bool_compare_and_swap.
5949 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5950 group of builtins. This gives us log2 of the mode size. */
5952 static inline machine_mode
5953 get_builtin_sync_mode (int fcode_diff)
5955 /* The size is not negotiable, so ask not to get BLKmode in return
5956 if the target indicates that a smaller size would be better. */
5957 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5960 /* Expand the memory expression LOC and return the appropriate memory operand
5961 for the builtin_sync operations. */
5963 static rtx
5964 get_builtin_sync_mem (tree loc, machine_mode mode)
5966 rtx addr, mem;
5967 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5968 ? TREE_TYPE (TREE_TYPE (loc))
5969 : TREE_TYPE (loc));
5970 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5972 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5973 addr = convert_memory_address (addr_mode, addr);
5975 /* Note that we explicitly do not want any alias information for this
5976 memory, so that we kill all other live memories. Otherwise we don't
5977 satisfy the full barrier semantics of the intrinsic. */
5978 mem = gen_rtx_MEM (mode, addr);
5980 set_mem_addr_space (mem, addr_space);
5982 mem = validize_mem (mem);
5984 /* The alignment needs to be at least that of the mode. */
5985 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5986 get_pointer_alignment (loc)));
5987 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5988 MEM_VOLATILE_P (mem) = 1;
5990 return mem;
5993 /* Make sure an argument is in the right mode.
5994 EXP is the tree argument.
5995 MODE is the mode it should be in. */
5997 static rtx
5998 expand_expr_force_mode (tree exp, machine_mode mode)
6000 rtx val;
6001 machine_mode old_mode;
6003 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6004 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6005 of CONST_INTs, where we know the old_mode only from the call argument. */
6007 old_mode = GET_MODE (val);
6008 if (old_mode == VOIDmode)
6009 old_mode = TYPE_MODE (TREE_TYPE (exp));
6010 val = convert_modes (mode, old_mode, val, 1);
6011 return val;
6015 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6016 EXP is the CALL_EXPR. CODE is the rtx code
6017 that corresponds to the arithmetic or logical operation from the name;
6018 an exception here is that NOT actually means NAND. TARGET is an optional
6019 place for us to store the results; AFTER is true if this is the
6020 fetch_and_xxx form. */
6022 static rtx
6023 expand_builtin_sync_operation (machine_mode mode, tree exp,
6024 enum rtx_code code, bool after,
6025 rtx target)
6027 rtx val, mem;
6028 location_t loc = EXPR_LOCATION (exp);
6030 if (code == NOT && warn_sync_nand)
6032 tree fndecl = get_callee_fndecl (exp);
6033 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6035 static bool warned_f_a_n, warned_n_a_f;
6037 switch (fcode)
6039 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6040 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6041 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6042 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6043 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6044 if (warned_f_a_n)
6045 break;
6047 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6048 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6049 warned_f_a_n = true;
6050 break;
6052 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6053 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6054 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6055 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6056 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6057 if (warned_n_a_f)
6058 break;
6060 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6061 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6062 warned_n_a_f = true;
6063 break;
6065 default:
6066 gcc_unreachable ();
6070 /* Expand the operands. */
6071 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6072 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6074 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6075 after);
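/* Editorial sketch (not GCC source): the legacy __sync fetch-and-op family
   expanded above, always with full SEQ_CST barrier semantics; the NAND
   members additionally carry the GCC 4.4 semantics warning emitted above.

     int counter;

     int bump (void)
     {
       // Atomically increment and return the *old* value.
       return __sync_fetch_and_add (&counter, 1);
     }
*/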
6078 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6079 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6080 true if this is the boolean form. TARGET is a place for us to store the
6081 results; this is NOT optional if IS_BOOL is true. */
6083 static rtx
6084 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6085 bool is_bool, rtx target)
6087 rtx old_val, new_val, mem;
6088 rtx *pbool, *poval;
6090 /* Expand the operands. */
6091 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6092 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6093 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6095 pbool = poval = NULL;
6096 if (target != const0_rtx)
6098 if (is_bool)
6099 pbool = &target;
6100 else
6101 poval = &target;
6103 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6104 false, MEMMODEL_SYNC_SEQ_CST,
6105 MEMMODEL_SYNC_SEQ_CST))
6106 return NULL_RTX;
6108 return target;
6111 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6112 general form is actually an atomic exchange, and some targets only
6113 support a reduced form with the second argument being a constant 1.
6114 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6115 the results. */
6117 static rtx
6118 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6119 rtx target)
6121 rtx val, mem;
6123 /* Expand the operands. */
6124 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6125 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6127 return expand_sync_lock_test_and_set (target, mem, val);
6130 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6132 static void
6133 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6135 rtx mem;
6137 /* Expand the operands. */
6138 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6143 /* Given an integer representing an ``enum memmodel'', verify its
6144 correctness and return the memory model enum. */
6146 static enum memmodel
6147 get_memmodel (tree exp)
6149 rtx op;
6150 unsigned HOST_WIDE_INT val;
6151 source_location loc
6152 = expansion_point_location_if_in_system_header (input_location);
6154 /* If the parameter is not a constant, it's a run time value so we'll just
6155 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6156 if (TREE_CODE (exp) != INTEGER_CST)
6157 return MEMMODEL_SEQ_CST;
6159 op = expand_normal (exp);
6161 val = INTVAL (op);
6162 if (targetm.memmodel_check)
6163 val = targetm.memmodel_check (val);
6164 else if (val & ~MEMMODEL_MASK)
6166 warning_at (loc, OPT_Winvalid_memory_model,
6167 "unknown architecture specifier in memory model to builtin");
6168 return MEMMODEL_SEQ_CST;
6171 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
6172 if (memmodel_base (val) >= MEMMODEL_LAST)
6174 warning_at (loc, OPT_Winvalid_memory_model,
6175 "invalid memory model argument to builtin");
6176 return MEMMODEL_SEQ_CST;
6179 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6180 be conservative and promote consume to acquire. */
6181 if (val == MEMMODEL_CONSUME)
6182 val = MEMMODEL_ACQUIRE;
6184 return (enum memmodel) val;
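/* Editorial sketch (not GCC source): the memmodel argument validated above,
   as it appears at user level.  The __ATOMIC_* macros are predefined by
   GCC; a constant, in-range model lets the access expand inline, and
   consume is silently promoted to acquire (PR 59448), as noted above.

     int load_flag (int *p)
     {
       return __atomic_load_n (p, __ATOMIC_ACQUIRE);
     }
*/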
6187 /* Expand the __atomic_exchange intrinsic:
6188 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6189 EXP is the CALL_EXPR.
6190 TARGET is an optional place for us to store the results. */
6192 static rtx
6193 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6195 rtx val, mem;
6196 enum memmodel model;
6198 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6200 if (!flag_inline_atomics)
6201 return NULL_RTX;
6203 /* Expand the operands. */
6204 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6205 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6207 return expand_atomic_exchange (target, mem, val, model);
6210 /* Expand the __atomic_compare_exchange intrinsic:
6211 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6212 TYPE desired, BOOL weak,
6213 enum memmodel success,
6214 enum memmodel failure)
6215 EXP is the CALL_EXPR.
6216 TARGET is an optional place for us to store the results. */
6218 static rtx
6219 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6220 rtx target)
6222 rtx expect, desired, mem, oldval;
6223 rtx_code_label *label;
6224 enum memmodel success, failure;
6225 tree weak;
6226 bool is_weak;
6227 source_location loc
6228 = expansion_point_location_if_in_system_header (input_location);
6230 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6231 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6233 if (failure > success)
6235 warning_at (loc, OPT_Winvalid_memory_model,
6236 "failure memory model cannot be stronger than success "
6237 "memory model for %<__atomic_compare_exchange%>");
6238 success = MEMMODEL_SEQ_CST;
6241 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6243 warning_at (loc, OPT_Winvalid_memory_model,
6244 "invalid failure memory model for "
6245 "%<__atomic_compare_exchange%>");
6246 failure = MEMMODEL_SEQ_CST;
6247 success = MEMMODEL_SEQ_CST;
6251 if (!flag_inline_atomics)
6252 return NULL_RTX;
6254 /* Expand the operands. */
6255 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6257 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6258 expect = convert_memory_address (Pmode, expect);
6259 expect = gen_rtx_MEM (mode, expect);
6260 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6262 weak = CALL_EXPR_ARG (exp, 3);
6263 is_weak = false;
6264 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6265 is_weak = true;
6267 if (target == const0_rtx)
6268 target = NULL;
6270 /* Lest the rtl backend create a race condition with an improper store
6271 to memory, always create a new pseudo for OLDVAL. */
6272 oldval = NULL;
6274 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6275 is_weak, success, failure))
6276 return NULL_RTX;
6278 /* Conditionally store back to EXPECT, lest we create a race condition
6279 with an improper store to memory. */
6280 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6281 the normal case where EXPECT is totally private, i.e. a register. At
6282 which point the store can be unconditional. */
6283 label = gen_label_rtx ();
6284 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6285 GET_MODE (target), 1, label);
6286 emit_move_insn (expect, oldval);
6287 emit_label (label);
6289 return target;
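/* Editorial sketch (not GCC source): the conditional store-back to EXPECT
   emitted above is what drives the canonical CAS retry loop - on failure
   the builtin refreshes EXPECTED with the value actually observed.

     int val;

     void atomic_add (int n)
     {
       int expected = __atomic_load_n (&val, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (&val, &expected, expected + n,
                                            1,                // weak CAS
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ;  // EXPECTED now holds the observed value; just retry
     }
*/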
6292 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6293 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6294 call. The weak parameter must be dropped to match the expected parameter
6295 list and the expected argument changed from value to pointer to memory
6296 slot. */
6298 static void
6299 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6301 unsigned int z;
6302 vec<tree, va_gc> *vec;
6304 vec_alloc (vec, 5);
6305 vec->quick_push (gimple_call_arg (call, 0));
6306 tree expected = gimple_call_arg (call, 1);
6307 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6308 TREE_TYPE (expected));
6309 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6310 if (expd != x)
6311 emit_move_insn (x, expd);
6312 tree v = make_tree (TREE_TYPE (expected), x);
6313 vec->quick_push (build1 (ADDR_EXPR,
6314 build_pointer_type (TREE_TYPE (expected)), v));
6315 vec->quick_push (gimple_call_arg (call, 2));
6316 /* Skip the boolean weak parameter. */
6317 for (z = 4; z < 6; z++)
6318 vec->quick_push (gimple_call_arg (call, z));
6319 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6320 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6321 gcc_assert (bytes_log2 < 5);
6322 built_in_function fncode
6323 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6324 + bytes_log2);
6325 tree fndecl = builtin_decl_explicit (fncode);
6326 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6327 fndecl);
6328 tree exp = build_call_vec (boolean_type_node, fn, vec);
6329 tree lhs = gimple_call_lhs (call);
6330 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6331 if (lhs)
6333 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6334 if (GET_MODE (boolret) != mode)
6335 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6336 x = force_reg (mode, x);
6337 write_complex_part (target, boolret, true);
6338 write_complex_part (target, x, false);
6342 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6344 void
6345 expand_ifn_atomic_compare_exchange (gcall *call)
6347 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6348 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6349 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6350 rtx expect, desired, mem, oldval, boolret;
6351 enum memmodel success, failure;
6352 tree lhs;
6353 bool is_weak;
6354 source_location loc
6355 = expansion_point_location_if_in_system_header (gimple_location (call));
6357 success = get_memmodel (gimple_call_arg (call, 4));
6358 failure = get_memmodel (gimple_call_arg (call, 5));
6360 if (failure > success)
6362 warning_at (loc, OPT_Winvalid_memory_model,
6363 "failure memory model cannot be stronger than success "
6364 "memory model for %<__atomic_compare_exchange%>");
6365 success = MEMMODEL_SEQ_CST;
6368 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6370 warning_at (loc, OPT_Winvalid_memory_model,
6371 "invalid failure memory model for "
6372 "%<__atomic_compare_exchange%>");
6373 failure = MEMMODEL_SEQ_CST;
6374 success = MEMMODEL_SEQ_CST;
6377 if (!flag_inline_atomics)
6379 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6380 return;
6383 /* Expand the operands. */
6384 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6386 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6387 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6389 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6391 boolret = NULL;
6392 oldval = NULL;
6394 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6395 is_weak, success, failure))
6397 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6398 return;
6401 lhs = gimple_call_lhs (call);
6402 if (lhs)
6404 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6405 if (GET_MODE (boolret) != mode)
6406 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6407 write_complex_part (target, boolret, true);
6408 write_complex_part (target, oldval, false);
6412 /* Expand the __atomic_load intrinsic:
6413 TYPE __atomic_load (TYPE *object, enum memmodel)
6414 EXP is the CALL_EXPR.
6415 TARGET is an optional place for us to store the results. */
6417 static rtx
6418 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6420 rtx mem;
6421 enum memmodel model;
6423 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6424 if (is_mm_release (model) || is_mm_acq_rel (model))
6426 source_location loc
6427 = expansion_point_location_if_in_system_header (input_location);
6428 warning_at (loc, OPT_Winvalid_memory_model,
6429 "invalid memory model for %<__atomic_load%>");
6430 model = MEMMODEL_SEQ_CST;
6433 if (!flag_inline_atomics)
6434 return NULL_RTX;
6436 /* Expand the operand. */
6437 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6439 return expand_atomic_load (target, mem, model);
6443 /* Expand the __atomic_store intrinsic:
6444 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6445 EXP is the CALL_EXPR.
6446 TARGET is an optional place for us to store the results. */
6448 static rtx
6449 expand_builtin_atomic_store (machine_mode mode, tree exp)
6451 rtx mem, val;
6452 enum memmodel model;
6454 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6455 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6456 || is_mm_release (model)))
6458 source_location loc
6459 = expansion_point_location_if_in_system_header (input_location);
6460 warning_at (loc, OPT_Winvalid_memory_model,
6461 "invalid memory model for %<__atomic_store%>");
6462 model = MEMMODEL_SEQ_CST;
6465 if (!flag_inline_atomics)
6466 return NULL_RTX;
6468 /* Expand the operands. */
6469 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6470 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6472 return expand_atomic_store (mem, val, model, false);
6475 /* Expand the __atomic_fetch_XXX intrinsic:
6476 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6477 EXP is the CALL_EXPR.
6478 TARGET is an optional place for us to store the results.
6479 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6480 FETCH_AFTER is true if returning the result of the operation.
6481 FETCH_AFTER is false if returning the value before the operation.
6482 IGNORE is true if the result is not used.
6483 EXT_CALL is the correct builtin for an external call if this cannot be
6484 resolved to an instruction sequence. */
6486 static rtx
6487 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6488 enum rtx_code code, bool fetch_after,
6489 bool ignore, enum built_in_function ext_call)
6491 rtx val, mem, ret;
6492 enum memmodel model;
6493 tree fndecl;
6494 tree addr;
6496 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6498 /* Expand the operands. */
6499 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6500 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6502 /* Only try generating instructions if inlining is turned on. */
6503 if (flag_inline_atomics)
6505 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6506 if (ret)
6507 return ret;
6510 /* Return if a different routine isn't needed for the library call. */
6511 if (ext_call == BUILT_IN_NONE)
6512 return NULL_RTX;
6514 /* Change the call to the specified function. */
6515 fndecl = get_callee_fndecl (exp);
6516 addr = CALL_EXPR_FN (exp);
6517 STRIP_NOPS (addr);
6519 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6520 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6522 /* If we will emit code after the call, the call cannot be a tail call.
6523 If it is emitted as a tail call, a barrier is emitted after it, and
6524 then all trailing code is removed. */
6525 if (!ignore)
6526 CALL_EXPR_TAILCALL (exp) = 0;
6528 /* Expand the call here so we can emit trailing code. */
6529 ret = expand_call (exp, target, ignore);
6531 /* Replace the original function just in case it matters. */
6532 TREE_OPERAND (addr, 0) = fndecl;
6534 /* Then issue the arithmetic correction to return the right result. */
6535 if (!ignore)
6537 if (code == NOT)
6539 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6540 OPTAB_LIB_WIDEN);
6541 ret = expand_simple_unop (mode, NOT, ret, target, true);
6543 else
6544 ret = expand_simple_binop (mode, code, ret, val, target, true,
6545 OPTAB_LIB_WIDEN);
6547 return ret;
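/* Editorial sketch (not GCC source): an op_and_fetch builtin that may fall
   back to the fetch_and_op libcall above, after which the arithmetic
   correction re-applies the operation to the returned (old) value.

     int x;

     int and_fetch (int mask)
     {
       // Returns the *new* value.  Without inline atomics this becomes a
       // __atomic_fetch_and libcall plus the "ret = ret & mask" fixup.
       return __atomic_and_fetch (&x, mask, __ATOMIC_SEQ_CST);
     }
*/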
6550 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6552 void
6553 expand_ifn_atomic_bit_test_and (gcall *call)
6555 tree ptr = gimple_call_arg (call, 0);
6556 tree bit = gimple_call_arg (call, 1);
6557 tree flag = gimple_call_arg (call, 2);
6558 tree lhs = gimple_call_lhs (call);
6559 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6560 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6561 enum rtx_code code;
6562 optab optab;
6563 struct expand_operand ops[5];
6565 gcc_assert (flag_inline_atomics);
6567 if (gimple_call_num_args (call) == 4)
6568 model = get_memmodel (gimple_call_arg (call, 3));
6570 rtx mem = get_builtin_sync_mem (ptr, mode);
6571 rtx val = expand_expr_force_mode (bit, mode);
6573 switch (gimple_call_internal_fn (call))
6575 case IFN_ATOMIC_BIT_TEST_AND_SET:
6576 code = IOR;
6577 optab = atomic_bit_test_and_set_optab;
6578 break;
6579 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6580 code = XOR;
6581 optab = atomic_bit_test_and_complement_optab;
6582 break;
6583 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6584 code = AND;
6585 optab = atomic_bit_test_and_reset_optab;
6586 break;
6587 default:
6588 gcc_unreachable ();
6591 if (lhs == NULL_TREE)
6593 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6594 val, NULL_RTX, true, OPTAB_DIRECT);
6595 if (code == AND)
6596 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6597 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6598 return;
6601 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6602 enum insn_code icode = direct_optab_handler (optab, mode);
6603 gcc_assert (icode != CODE_FOR_nothing);
6604 create_output_operand (&ops[0], target, mode);
6605 create_fixed_operand (&ops[1], mem);
6606 create_convert_operand_to (&ops[2], val, mode, true);
6607 create_integer_operand (&ops[3], model);
6608 create_integer_operand (&ops[4], integer_onep (flag));
6609 if (maybe_expand_insn (icode, 5, ops))
6610 return;
6612 rtx bitval = val;
6613 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6614 val, NULL_RTX, true, OPTAB_DIRECT);
6615 rtx maskval = val;
6616 if (code == AND)
6617 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6618 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6619 code, model, false);
6620 if (integer_onep (flag))
6622 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6623 NULL_RTX, true, OPTAB_DIRECT);
6624 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6625 true, OPTAB_DIRECT);
6627 else
6628 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6629 OPTAB_DIRECT);
6630 if (result != target)
6631 emit_move_insn (target, result);
6634 /* Expand an atomic clear operation.
6635 void _atomic_clear (BOOL *obj, enum memmodel)
6636 EXP is the call expression. */
6638 static rtx
6639 expand_builtin_atomic_clear (tree exp)
6641 machine_mode mode;
6642 rtx mem, ret;
6643 enum memmodel model;
6645 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6646 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6647 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6649 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6651 source_location loc
6652 = expansion_point_location_if_in_system_header (input_location);
6653 warning_at (loc, OPT_Winvalid_memory_model,
6654 "invalid memory model for %<__atomic_store%>");
6655 model = MEMMODEL_SEQ_CST;
6658 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6659 Failing that, a store is issued by __atomic_store. The only way this can
6660 fail is if the bool type is larger than a word size. Unlikely, but
6661 handle it anyway for completeness. Assume a single threaded model since
6662 there is no atomic support in this case, and no barriers are required. */
6663 ret = expand_atomic_store (mem, const0_rtx, model, true);
6664 if (!ret)
6665 emit_move_insn (mem, const0_rtx);
6666 return const0_rtx;
6669 /* Expand an atomic test_and_set operation.
6670 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6671 EXP is the call expression. */
6673 static rtx
6674 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6676 rtx mem;
6677 enum memmodel model;
6678 machine_mode mode;
6680 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6681 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6682 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6684 return expand_atomic_test_and_set (target, mem, model);
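/* Editorial sketch (not GCC source): test_and_set plus clear form the
   minimal spinlock these two expanders support, even on targets with no
   compare-and-swap.

     static volatile _Bool lock;

     void acquire (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;   // spin until the previous value was 0
     }

     void release (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }
*/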
6688 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6689 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6691 static tree
6692 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6694 int size;
6695 machine_mode mode;
6696 unsigned int mode_align, type_align;
6698 if (TREE_CODE (arg0) != INTEGER_CST)
6699 return NULL_TREE;
6701 /* We need a corresponding integer mode for the access to be lock-free. */
6702 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6703 if (!int_mode_for_size (size, 0).exists (&mode))
6704 return boolean_false_node;
6706 mode_align = GET_MODE_ALIGNMENT (mode);
6708 if (TREE_CODE (arg1) == INTEGER_CST)
6710 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6712 /* Either this argument is null, or it's a fake pointer encoding
6713 the alignment of the object. */
6714 val = least_bit_hwi (val);
6715 val *= BITS_PER_UNIT;
6717 if (val == 0 || mode_align < val)
6718 type_align = mode_align;
6719 else
6720 type_align = val;
6722 else
6724 tree ttype = TREE_TYPE (arg1);
6726 /* This function is usually invoked and folded immediately by the front
6727 end before anything else has a chance to look at it. The pointer
6728 parameter at this point is usually cast to a void *, so check for that
6729 and look past the cast. */
6730 if (CONVERT_EXPR_P (arg1)
6731 && POINTER_TYPE_P (ttype)
6732 && VOID_TYPE_P (TREE_TYPE (ttype))
6733 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6734 arg1 = TREE_OPERAND (arg1, 0);
6736 ttype = TREE_TYPE (arg1);
6737 gcc_assert (POINTER_TYPE_P (ttype));
6739 /* Get the underlying type of the object. */
6740 ttype = TREE_TYPE (ttype);
6741 type_align = TYPE_ALIGN (ttype);
6744 /* If the object has smaller alignment, the lock free routines cannot
6745 be used. */
6746 if (type_align < mode_align)
6747 return boolean_false_node;
6749 /* Check if a compare_and_swap pattern exists for the mode which represents
6750 the required size. The pattern is not allowed to fail, so the existence
6751 of the pattern indicates support is present. Also require that an
6752 atomic load exists for the required size. */
6753 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6754 return boolean_true_node;
6755 else
6756 return boolean_false_node;
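/* Editorial sketch (not GCC source): the folding above runs in the front
   end, so this typically becomes a compile-time constant.  A null pointer
   means "assume typical alignment for an object of that size".

     _Bool int_lock_free_p (void)
     {
       // True when a CAS pattern and an atomic load exist for a naturally
       // aligned int-sized object.
       return __atomic_always_lock_free (sizeof (int), 0);
     }
*/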
6759 /* Return true if the parameters to call EXP represent an object which will
6760 always generate lock free instructions. The first argument represents the
6761 size of the object, and the second parameter is a pointer to the object
6762 itself. If NULL is passed for the object, then the result is based on
6763 typical alignment for an object of the specified size. Otherwise return
6764 false. */
6766 static rtx
6767 expand_builtin_atomic_always_lock_free (tree exp)
6769 tree size;
6770 tree arg0 = CALL_EXPR_ARG (exp, 0);
6771 tree arg1 = CALL_EXPR_ARG (exp, 1);
6773 if (TREE_CODE (arg0) != INTEGER_CST)
6775 error ("non-constant argument 1 to __atomic_always_lock_free");
6776 return const0_rtx;
6779 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6780 if (size == boolean_true_node)
6781 return const1_rtx;
6782 return const0_rtx;
6785 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6786 is lock free on this architecture. */
6788 static tree
6789 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6791 if (!flag_inline_atomics)
6792 return NULL_TREE;
6794 /* If it isn't always lock free, don't generate a result. */
6795 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6796 return boolean_true_node;
6798 return NULL_TREE;
6801 /* Return true if the parameters to call EXP represent an object which will
6802 always generate lock free instructions. The first argument represents the
6803 size of the object, and the second parameter is a pointer to the object
6804 itself. If NULL is passed for the object, then the result is based on
6805 typical alignment for an object of the specified size. Otherwise return
6806 NULL. */
6808 static rtx
6809 expand_builtin_atomic_is_lock_free (tree exp)
6811 tree size;
6812 tree arg0 = CALL_EXPR_ARG (exp, 0);
6813 tree arg1 = CALL_EXPR_ARG (exp, 1);
6815 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6817 error ("non-integer argument 1 to __atomic_is_lock_free");
6818 return NULL_RTX;
6821 if (!flag_inline_atomics)
6822 return NULL_RTX;
6824 /* If the value is known at compile time, return the RTX for it. */
6825 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6826 if (size == boolean_true_node)
6827 return const1_rtx;
6829 return NULL_RTX;
6832 /* Expand the __atomic_thread_fence intrinsic:
6833 void __atomic_thread_fence (enum memmodel)
6834 EXP is the CALL_EXPR. */
6836 static void
6837 expand_builtin_atomic_thread_fence (tree exp)
6839 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6840 expand_mem_thread_fence (model);
6843 /* Expand the __atomic_signal_fence intrinsic:
6844 void __atomic_signal_fence (enum memmodel)
6845 EXP is the CALL_EXPR. */
6847 static void
6848 expand_builtin_atomic_signal_fence (tree exp)
6850 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6851 expand_mem_signal_fence (model);
6854 /* Expand the __sync_synchronize intrinsic. */
6856 static void
6857 expand_builtin_sync_synchronize (void)
6859 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6862 static rtx
6863 expand_builtin_thread_pointer (tree exp, rtx target)
6865 enum insn_code icode;
6866 if (!validate_arglist (exp, VOID_TYPE))
6867 return const0_rtx;
6868 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6869 if (icode != CODE_FOR_nothing)
6871 struct expand_operand op;
6872 /* If the target is not suitable then create a new target. */
6873 if (target == NULL_RTX
6874 || !REG_P (target)
6875 || GET_MODE (target) != Pmode)
6876 target = gen_reg_rtx (Pmode);
6877 create_output_operand (&op, target, Pmode);
6878 expand_insn (icode, 1, &op);
6879 return target;
6881 error ("__builtin_thread_pointer is not supported on this target");
6882 return const0_rtx;
6885 static void
6886 expand_builtin_set_thread_pointer (tree exp)
6888 enum insn_code icode;
6889 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6890 return;
6891 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6892 if (icode != CODE_FOR_nothing)
6894 struct expand_operand op;
6895 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6896 Pmode, EXPAND_NORMAL);
6897 create_input_operand (&op, val, Pmode);
6898 expand_insn (icode, 1, &op);
6899 return;
6901 error ("__builtin_set_thread_pointer is not supported on this target");
6905 /* Emit code to restore the current value of the stack. */
6907 static void
6908 expand_stack_restore (tree var)
6910 rtx_insn *prev;
6911 rtx sa = expand_normal (var);
6913 sa = convert_memory_address (Pmode, sa);
6915 prev = get_last_insn ();
6916 emit_stack_restore (SAVE_BLOCK, sa);
6918 record_new_stack_level ();
6920 fixup_args_size_notes (prev, get_last_insn (), 0);
6923 /* Emit code to save the current value of the stack. */
6925 static rtx
6926 expand_stack_save (void)
6928 rtx ret = NULL_RTX;
6930 emit_stack_save (SAVE_BLOCK, &ret);
6931 return ret;
6934 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6936 static rtx
6937 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6939 const char *name;
6940 rtx fallback_retval;
6941 rtx_insn *(*gen_fn) (rtx, rtx);
6942 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6944 case BUILT_IN_GOACC_PARLEVEL_ID:
6945 name = "__builtin_goacc_parlevel_id";
6946 fallback_retval = const0_rtx;
6947 gen_fn = targetm.gen_oacc_dim_pos;
6948 break;
6949 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6950 name = "__builtin_goacc_parlevel_size";
6951 fallback_retval = const1_rtx;
6952 gen_fn = targetm.gen_oacc_dim_size;
6953 break;
6954 default:
6955 gcc_unreachable ();
6958 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6960 error ("%qs only supported in OpenACC code", name);
6961 return const0_rtx;
6964 tree arg = CALL_EXPR_ARG (exp, 0);
6965 if (TREE_CODE (arg) != INTEGER_CST)
6967 error ("non-constant argument 0 to %qs", name);
6968 return const0_rtx;
6971 int dim = TREE_INT_CST_LOW (arg);
6972 switch (dim)
6974 case GOMP_DIM_GANG:
6975 case GOMP_DIM_WORKER:
6976 case GOMP_DIM_VECTOR:
6977 break;
6978 default:
6979 error ("illegal argument 0 to %qs", name);
6980 return const0_rtx;
6983 if (ignore)
6984 return target;
6986 if (target == NULL_RTX)
6987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6989 if (!targetm.have_oacc_dim_size ())
6991 emit_move_insn (target, fallback_retval);
6992 return target;
6995 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6996 emit_insn (gen_fn (reg, GEN_INT (dim)));
6997 if (reg != target)
6998 emit_move_insn (target, reg);
7000 return target;
7003 /* Expand a string compare operation using a sequence of char comparisons
7004 to get rid of the calling overhead, with result going to TARGET if
7005 that's convenient.
7007 VAR_STR is the variable string source;
7008 CONST_STR is the constant string source;
7009 LENGTH is the number of chars to compare;
7010 CONST_STR_N indicates which source string is the constant string;
7011 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7013 It is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7015 target = (int) (unsigned char) var_str[0]
7016 - (int) (unsigned char) const_str[0];
7017 if (target != 0)
7018 goto ne_label;
7020 target = (int) (unsigned char) var_str[length - 2]
7021 - (int) (unsigned char) const_str[length - 2];
7022 if (target != 0)
7023 goto ne_label;
7024 target = (int) (unsigned char) var_str[length - 1]
7025 - (int) (unsigned char) const_str[length - 1];
7026 ne_label:
7029 static rtx
7030 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7031 unsigned HOST_WIDE_INT length,
7032 int const_str_n, machine_mode mode)
7034 HOST_WIDE_INT offset = 0;
7035 rtx var_rtx_array
7036 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7037 rtx var_rtx = NULL_RTX;
7038 rtx const_rtx = NULL_RTX;
7039 rtx result = target ? target : gen_reg_rtx (mode);
7040 rtx_code_label *ne_label = gen_label_rtx ();
7041 tree unit_type_node = unsigned_char_type_node;
7042 scalar_int_mode unit_mode
7043 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7045 start_sequence ();
7047 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7049 var_rtx
7050 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7051 const_rtx = c_readstr (const_str + offset, unit_mode);
7052 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7053 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7055 op0 = convert_modes (mode, unit_mode, op0, 1);
7056 op1 = convert_modes (mode, unit_mode, op1, 1);
7057 result = expand_simple_binop (mode, MINUS, op0, op1,
7058 result, 1, OPTAB_WIDEN);
7059 if (i < length - 1)
7060 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7061 mode, true, ne_label);
7062 offset += GET_MODE_SIZE (unit_mode);
7065 emit_label (ne_label);
7066 rtx_insn *insns = get_insns ();
7067 end_sequence ();
7068 emit_insn (insns);
7070 return result;
7073 /* Inline expansion of a call to str(n)cmp (or memcmp), with the result
7074 going to TARGET if that's convenient.
7075 If the call cannot be inlined, return NULL_RTX. */
7076 static rtx
7077 inline_expand_builtin_string_cmp (tree exp, rtx target)
7079 tree fndecl = get_callee_fndecl (exp);
7080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7081 unsigned HOST_WIDE_INT length = 0;
7082 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7084 /* Do NOT apply this inlining expansion when optimizing for size or
7085 when the optimization level is below 2. */
7086 if (optimize < 2 || optimize_insn_for_size_p ())
7087 return NULL_RTX;
7089 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7090 || fcode == BUILT_IN_STRNCMP
7091 || fcode == BUILT_IN_MEMCMP);
7093 /* On a target where the type of the call (int) has the same or narrower
7094 precision than unsigned char, give up the inlining expansion. */
7095 if (TYPE_PRECISION (unsigned_char_type_node)
7096 >= TYPE_PRECISION (TREE_TYPE (exp)))
7097 return NULL_RTX;
7099 tree arg1 = CALL_EXPR_ARG (exp, 0);
7100 tree arg2 = CALL_EXPR_ARG (exp, 1);
7101 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7103 unsigned HOST_WIDE_INT len1 = 0;
7104 unsigned HOST_WIDE_INT len2 = 0;
7105 unsigned HOST_WIDE_INT len3 = 0;
7107 const char *src_str1 = c_getstr (arg1, &len1);
7108 const char *src_str2 = c_getstr (arg2, &len2);
7110 /* If neither string is a constant string, the call does not qualify. */
7111 if (!src_str1 && !src_str2)
7112 return NULL_RTX;
7114 /* For strncmp and memcmp, if the length is not a constant, the call does not qualify. */
7115 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7116 return NULL_RTX;
7118 int const_str_n = 0;
7119 if (!len1)
7120 const_str_n = 2;
7121 else if (!len2)
7122 const_str_n = 1;
7123 else if (len2 > len1)
7124 const_str_n = 1;
7125 else
7126 const_str_n = 2;
7128 gcc_checking_assert (const_str_n > 0);
7129 length = (const_str_n == 1) ? len1 : len2;
7131 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7132 length = len3;
7134 /* If the length of the comparison is larger than the threshold,
7135 do nothing. */
7136 if (length > (unsigned HOST_WIDE_INT)
7137 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7138 return NULL_RTX;
7140 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7142 /* Now, start inline expansion of the call. */
7143 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7144 (const_str_n == 1) ? src_str1 : src_str2, length,
7145 const_str_n, mode);
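/* Illustrative (hypothetical user code): at -O2 on a typical target,

     int has_prefix (const char *s) { return strncmp (s, "abc", 3); }

   qualifies for this expansion: "abc" is a constant string and the
   length 3 is a constant within the threshold checked above, so the
   call is expanded by inline_string_cmp instead of a library call.  */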
7148 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7149 represents the size of the first argument to that call, or VOIDmode
7150 if the argument is a pointer. IGNORE will be true if the result
7151 isn't used. */
7152 static rtx
7153 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7154 bool ignore)
7156 rtx val, failsafe;
7157 unsigned nargs = call_expr_nargs (exp);
7159 tree arg0 = CALL_EXPR_ARG (exp, 0);
7161 if (mode == VOIDmode)
7163 mode = TYPE_MODE (TREE_TYPE (arg0));
7164 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7167 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7169 /* An optional second argument can be used as a failsafe value on
7170 some machines. If it isn't present, then the failsafe value is
7171 assumed to be 0. */
7172 if (nargs > 1)
7174 tree arg1 = CALL_EXPR_ARG (exp, 1);
7175 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7177 else
7178 failsafe = const0_rtx;
7180 /* If the result isn't used, the behavior is undefined. It would be
7181 nice to emit a warning here, but path splitting means this might
7182 happen with legitimate code. So simply drop the builtin
7183 expansion in that case; we've handled any side-effects above. */
7184 if (ignore)
7185 return const0_rtx;
7187 /* If we don't have a suitable target, create one to hold the result. */
7188 if (target == NULL || GET_MODE (target) != mode)
7189 target = gen_reg_rtx (mode);
7191 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7192 val = convert_modes (mode, VOIDmode, val, false);
7194 return targetm.speculation_safe_value (mode, target, val, failsafe);
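/* Illustrative use in source code (hypothetical), guarding an array
   access against Spectre-style misspeculation:

     if (i < bound)
       {
         i = __builtin_speculation_safe_value (i);
         val = array[i];
       }

   On a misspeculated path, I is forced to the failsafe value (zero
   here, since the optional second argument is omitted).  */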
7197 /* Expand an expression EXP that calls a built-in function,
7198 with result going to TARGET if that's convenient
7199 (and in mode MODE if that's convenient).
7200 SUBTARGET may be used as the target for computing one of EXP's operands.
7201 IGNORE is nonzero if the value is to be ignored. */
7203 rtx
7204 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7205 int ignore)
7207 tree fndecl = get_callee_fndecl (exp);
7208 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7209 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7210 int flags;
7212 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7213 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7215 /* When ASan is enabled, we don't want to expand some memory/string
7216 builtins and rely on libsanitizer's hooks. This allows us to avoid
7217 redundant checks and be sure that possible overflow will be detected
7218 by ASan. */
7220 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7221 return expand_call (exp, target, ignore);
7223 /* When not optimizing, generate calls to library functions for a certain
7224 set of builtins. */
7225 if (!optimize
7226 && !called_as_built_in (fndecl)
7227 && fcode != BUILT_IN_FORK
7228 && fcode != BUILT_IN_EXECL
7229 && fcode != BUILT_IN_EXECV
7230 && fcode != BUILT_IN_EXECLP
7231 && fcode != BUILT_IN_EXECLE
7232 && fcode != BUILT_IN_EXECVP
7233 && fcode != BUILT_IN_EXECVE
7234 && !ALLOCA_FUNCTION_CODE_P (fcode)
7235 && fcode != BUILT_IN_FREE)
7236 return expand_call (exp, target, ignore);
7238 /* The built-in function expanders test for target == const0_rtx
7239 to determine whether the function's result will be ignored. */
7240 if (ignore)
7241 target = const0_rtx;
7243 /* If the result of a pure or const built-in function is ignored, and
7244 none of its arguments are volatile, we can avoid expanding the
7245 built-in call and just evaluate the arguments for side-effects. */
7246 if (target == const0_rtx
7247 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7248 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7250 bool volatilep = false;
7251 tree arg;
7252 call_expr_arg_iterator iter;
7254 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7255 if (TREE_THIS_VOLATILE (arg))
7257 volatilep = true;
7258 break;
7261 if (! volatilep)
7263 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7264 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7265 return const0_rtx;
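/* For example (illustrative): in "(void) __builtin_labs (x++);" the
   const call itself is not expanded, but x++ is still evaluated for
   its side effect.  */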
7269 switch (fcode)
7271 CASE_FLT_FN (BUILT_IN_FABS):
7272 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7273 case BUILT_IN_FABSD32:
7274 case BUILT_IN_FABSD64:
7275 case BUILT_IN_FABSD128:
7276 target = expand_builtin_fabs (exp, target, subtarget);
7277 if (target)
7278 return target;
7279 break;
7281 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7283 target = expand_builtin_copysign (exp, target, subtarget);
7284 if (target)
7285 return target;
7286 break;
7288 /* Just do a normal library call if we were unable to fold
7289 the values. */
7290 CASE_FLT_FN (BUILT_IN_CABS):
7291 break;
7293 CASE_FLT_FN (BUILT_IN_FMA):
7294 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7295 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7296 if (target)
7297 return target;
7298 break;
7300 CASE_FLT_FN (BUILT_IN_ILOGB):
7301 if (! flag_unsafe_math_optimizations)
7302 break;
7303 gcc_fallthrough ();
7304 CASE_FLT_FN (BUILT_IN_ISINF):
7305 CASE_FLT_FN (BUILT_IN_FINITE):
7306 case BUILT_IN_ISFINITE:
7307 case BUILT_IN_ISNORMAL:
7308 target = expand_builtin_interclass_mathfn (exp, target);
7309 if (target)
7310 return target;
7311 break;
7313 CASE_FLT_FN (BUILT_IN_ICEIL):
7314 CASE_FLT_FN (BUILT_IN_LCEIL):
7315 CASE_FLT_FN (BUILT_IN_LLCEIL):
7316 CASE_FLT_FN (BUILT_IN_LFLOOR):
7317 CASE_FLT_FN (BUILT_IN_IFLOOR):
7318 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7319 target = expand_builtin_int_roundingfn (exp, target);
7320 if (target)
7321 return target;
7322 break;
7324 CASE_FLT_FN (BUILT_IN_IRINT):
7325 CASE_FLT_FN (BUILT_IN_LRINT):
7326 CASE_FLT_FN (BUILT_IN_LLRINT):
7327 CASE_FLT_FN (BUILT_IN_IROUND):
7328 CASE_FLT_FN (BUILT_IN_LROUND):
7329 CASE_FLT_FN (BUILT_IN_LLROUND):
7330 target = expand_builtin_int_roundingfn_2 (exp, target);
7331 if (target)
7332 return target;
7333 break;
7335 CASE_FLT_FN (BUILT_IN_POWI):
7336 target = expand_builtin_powi (exp, target);
7337 if (target)
7338 return target;
7339 break;
7341 CASE_FLT_FN (BUILT_IN_CEXPI):
7342 target = expand_builtin_cexpi (exp, target);
7343 gcc_assert (target);
7344 return target;
7346 CASE_FLT_FN (BUILT_IN_SIN):
7347 CASE_FLT_FN (BUILT_IN_COS):
7348 if (! flag_unsafe_math_optimizations)
7349 break;
7350 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7351 if (target)
7352 return target;
7353 break;
7355 CASE_FLT_FN (BUILT_IN_SINCOS):
7356 if (! flag_unsafe_math_optimizations)
7357 break;
7358 target = expand_builtin_sincos (exp);
7359 if (target)
7360 return target;
7361 break;
7363 case BUILT_IN_APPLY_ARGS:
7364 return expand_builtin_apply_args ();
7366 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7367 FUNCTION with a copy of the parameters described by
7368 ARGUMENTS, and ARGSIZE. It returns a block of memory
7369 allocated on the stack into which is stored all the registers
7370 that might possibly be used for returning the result of a
7371 function. ARGUMENTS is the value returned by
7372 __builtin_apply_args. ARGSIZE is the number of bytes of
7373 arguments that must be copied. ??? How should this value be
7374 computed? We'll also need a safe worst case value for varargs
7375 functions. */
7376 case BUILT_IN_APPLY:
7377 if (!validate_arglist (exp, POINTER_TYPE,
7378 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7379 && !validate_arglist (exp, REFERENCE_TYPE,
7380 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7381 return const0_rtx;
7382 else
7384 rtx ops[3];
7386 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7387 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7388 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7390 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7393 /* __builtin_return (RESULT) causes the function to return the
7394 value described by RESULT. RESULT is the address of the block of
7395 memory returned by __builtin_apply. */
7396 case BUILT_IN_RETURN:
7397 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7398 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7399 return const0_rtx;
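/* A sketch of how these builtins combine in user code (hypothetical;
   the ARGSIZE of 64 is a caller-chosen worst-case bound):

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) worker, args, 64);
       __builtin_return (ret);
     }
*/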
7401 case BUILT_IN_SAVEREGS:
7402 return expand_builtin_saveregs ();
7404 case BUILT_IN_VA_ARG_PACK:
7405 /* All valid uses of __builtin_va_arg_pack () are removed during
7406 inlining. */
7407 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7408 return const0_rtx;
7410 case BUILT_IN_VA_ARG_PACK_LEN:
7411 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7412 inlining. */
7413 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7414 return const0_rtx;
7416 /* Return the address of the first anonymous stack arg. */
7417 case BUILT_IN_NEXT_ARG:
7418 if (fold_builtin_next_arg (exp, false))
7419 return const0_rtx;
7420 return expand_builtin_next_arg ();
7422 case BUILT_IN_CLEAR_CACHE:
7423 target = expand_builtin___clear_cache (exp);
7424 if (target)
7425 return target;
7426 break;
7428 case BUILT_IN_CLASSIFY_TYPE:
7429 return expand_builtin_classify_type (exp);
7431 case BUILT_IN_CONSTANT_P:
7432 return const0_rtx;
7434 case BUILT_IN_FRAME_ADDRESS:
7435 case BUILT_IN_RETURN_ADDRESS:
7436 return expand_builtin_frame_address (fndecl, exp);
7438 /* Returns the address of the area where the structure is returned.
7439 0 otherwise. */
7440 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7441 if (call_expr_nargs (exp) != 0
7442 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7443 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7444 return const0_rtx;
7445 else
7446 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7448 CASE_BUILT_IN_ALLOCA:
7449 target = expand_builtin_alloca (exp);
7450 if (target)
7451 return target;
7452 break;
7454 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7455 return expand_asan_emit_allocas_unpoison (exp);
7457 case BUILT_IN_STACK_SAVE:
7458 return expand_stack_save ();
7460 case BUILT_IN_STACK_RESTORE:
7461 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7462 return const0_rtx;
7464 case BUILT_IN_BSWAP16:
7465 case BUILT_IN_BSWAP32:
7466 case BUILT_IN_BSWAP64:
7467 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7468 if (target)
7469 return target;
7470 break;
7472 CASE_INT_FN (BUILT_IN_FFS):
7473 target = expand_builtin_unop (target_mode, exp, target,
7474 subtarget, ffs_optab);
7475 if (target)
7476 return target;
7477 break;
7479 CASE_INT_FN (BUILT_IN_CLZ):
7480 target = expand_builtin_unop (target_mode, exp, target,
7481 subtarget, clz_optab);
7482 if (target)
7483 return target;
7484 break;
7486 CASE_INT_FN (BUILT_IN_CTZ):
7487 target = expand_builtin_unop (target_mode, exp, target,
7488 subtarget, ctz_optab);
7489 if (target)
7490 return target;
7491 break;
7493 CASE_INT_FN (BUILT_IN_CLRSB):
7494 target = expand_builtin_unop (target_mode, exp, target,
7495 subtarget, clrsb_optab);
7496 if (target)
7497 return target;
7498 break;
7500 CASE_INT_FN (BUILT_IN_POPCOUNT):
7501 target = expand_builtin_unop (target_mode, exp, target,
7502 subtarget, popcount_optab);
7503 if (target)
7504 return target;
7505 break;
7507 CASE_INT_FN (BUILT_IN_PARITY):
7508 target = expand_builtin_unop (target_mode, exp, target,
7509 subtarget, parity_optab);
7510 if (target)
7511 return target;
7512 break;
7514 case BUILT_IN_STRLEN:
7515 target = expand_builtin_strlen (exp, target, target_mode);
7516 if (target)
7517 return target;
7518 break;
7520 case BUILT_IN_STRNLEN:
7521 target = expand_builtin_strnlen (exp, target, target_mode);
7522 if (target)
7523 return target;
7524 break;
7526 case BUILT_IN_STRCAT:
7527 target = expand_builtin_strcat (exp, target);
7528 if (target)
7529 return target;
7530 break;
7532 case BUILT_IN_STRCPY:
7533 target = expand_builtin_strcpy (exp, target);
7534 if (target)
7535 return target;
7536 break;
7538 case BUILT_IN_STRNCAT:
7539 target = expand_builtin_strncat (exp, target);
7540 if (target)
7541 return target;
7542 break;
7544 case BUILT_IN_STRNCPY:
7545 target = expand_builtin_strncpy (exp, target);
7546 if (target)
7547 return target;
7548 break;
7550 case BUILT_IN_STPCPY:
7551 target = expand_builtin_stpcpy (exp, target, mode);
7552 if (target)
7553 return target;
7554 break;
7556 case BUILT_IN_STPNCPY:
7557 target = expand_builtin_stpncpy (exp, target);
7558 if (target)
7559 return target;
7560 break;
7562 case BUILT_IN_MEMCHR:
7563 target = expand_builtin_memchr (exp, target);
7564 if (target)
7565 return target;
7566 break;
7568 case BUILT_IN_MEMCPY:
7569 target = expand_builtin_memcpy (exp, target);
7570 if (target)
7571 return target;
7572 break;
7574 case BUILT_IN_MEMMOVE:
7575 target = expand_builtin_memmove (exp, target);
7576 if (target)
7577 return target;
7578 break;
7580 case BUILT_IN_MEMPCPY:
7581 target = expand_builtin_mempcpy (exp, target);
7582 if (target)
7583 return target;
7584 break;
7586 case BUILT_IN_MEMSET:
7587 target = expand_builtin_memset (exp, target, mode);
7588 if (target)
7589 return target;
7590 break;
7592 case BUILT_IN_BZERO:
7593 target = expand_builtin_bzero (exp);
7594 if (target)
7595 return target;
7596 break;
7598 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7599 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7600 when changing it to a strcmp call. */
7601 case BUILT_IN_STRCMP_EQ:
7602 target = expand_builtin_memcmp (exp, target, true);
7603 if (target)
7604 return target;
7606 /* Change this call back to a BUILT_IN_STRCMP. */
7607 TREE_OPERAND (exp, 1)
7608 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7610 /* Delete the last parameter. */
7611 unsigned int i;
7612 vec<tree, va_gc> *arg_vec;
7613 vec_alloc (arg_vec, 2);
7614 for (i = 0; i < 2; i++)
7615 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7616 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7617 /* FALLTHROUGH */
7619 case BUILT_IN_STRCMP:
7620 target = expand_builtin_strcmp (exp, target);
7621 if (target)
7622 return target;
7623 break;
7625 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7626 back to a BUILT_IN_STRNCMP. */
7627 case BUILT_IN_STRNCMP_EQ:
7628 target = expand_builtin_memcmp (exp, target, true);
7629 if (target)
7630 return target;
7632 /* Change it back to a BUILT_IN_STRNCMP. */
7633 TREE_OPERAND (exp, 1)
7634 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7635 /* FALLTHROUGH */
7637 case BUILT_IN_STRNCMP:
7638 target = expand_builtin_strncmp (exp, target, mode);
7639 if (target)
7640 return target;
7641 break;
7643 case BUILT_IN_BCMP:
7644 case BUILT_IN_MEMCMP:
7645 case BUILT_IN_MEMCMP_EQ:
7646 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7647 if (target)
7648 return target;
7649 if (fcode == BUILT_IN_MEMCMP_EQ)
7651 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7652 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7654 break;
7656 case BUILT_IN_SETJMP:
7657 /* This should have been lowered to the builtins below. */
7658 gcc_unreachable ();
7660 case BUILT_IN_SETJMP_SETUP:
7661 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7662 and the receiver label. */
7663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7665 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7666 VOIDmode, EXPAND_NORMAL);
7667 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7668 rtx_insn *label_r = label_rtx (label);
7670 /* This is copied from the handling of non-local gotos. */
7671 expand_builtin_setjmp_setup (buf_addr, label_r);
7672 nonlocal_goto_handler_labels
7673 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7674 nonlocal_goto_handler_labels);
7675 /* ??? Do not let expand_label treat us as such since we would
7676 not want to be both on the list of non-local labels and on
7677 the list of forced labels. */
7678 FORCED_LABEL (label) = 0;
7679 return const0_rtx;
7681 break;
7683 case BUILT_IN_SETJMP_RECEIVER:
7684 /* __builtin_setjmp_receiver is passed the receiver label. */
7685 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7687 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7688 rtx_insn *label_r = label_rtx (label);
7690 expand_builtin_setjmp_receiver (label_r);
7691 return const0_rtx;
7693 break;
7695 /* __builtin_longjmp is passed a pointer to an array of five words.
7696 It's similar to the C library longjmp function but works with
7697 __builtin_setjmp above. */
7698 case BUILT_IN_LONGJMP:
7699 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7701 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7702 VOIDmode, EXPAND_NORMAL);
7703 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7705 if (value != const1_rtx)
7707 error ("%<__builtin_longjmp%> second argument must be 1");
7708 return const0_rtx;
7711 expand_builtin_longjmp (buf_addr, value);
7712 return const0_rtx;
7714 break;
7716 case BUILT_IN_NONLOCAL_GOTO:
7717 target = expand_builtin_nonlocal_goto (exp);
7718 if (target)
7719 return target;
7720 break;
7722 /* This updates the setjmp buffer that is its argument with the value
7723 of the current stack pointer. */
7724 case BUILT_IN_UPDATE_SETJMP_BUF:
7725 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7727 rtx buf_addr
7728 = expand_normal (CALL_EXPR_ARG (exp, 0));
7730 expand_builtin_update_setjmp_buf (buf_addr);
7731 return const0_rtx;
7733 break;
7735 case BUILT_IN_TRAP:
7736 expand_builtin_trap ();
7737 return const0_rtx;
7739 case BUILT_IN_UNREACHABLE:
7740 expand_builtin_unreachable ();
7741 return const0_rtx;
7743 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7744 case BUILT_IN_SIGNBITD32:
7745 case BUILT_IN_SIGNBITD64:
7746 case BUILT_IN_SIGNBITD128:
7747 target = expand_builtin_signbit (exp, target);
7748 if (target)
7749 return target;
7750 break;
7752 /* Various hooks for the DWARF 2 __throw routine. */
7753 case BUILT_IN_UNWIND_INIT:
7754 expand_builtin_unwind_init ();
7755 return const0_rtx;
7756 case BUILT_IN_DWARF_CFA:
7757 return virtual_cfa_rtx;
7758 #ifdef DWARF2_UNWIND_INFO
7759 case BUILT_IN_DWARF_SP_COLUMN:
7760 return expand_builtin_dwarf_sp_column ();
7761 case BUILT_IN_INIT_DWARF_REG_SIZES:
7762 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7763 return const0_rtx;
7764 #endif
7765 case BUILT_IN_FROB_RETURN_ADDR:
7766 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7767 case BUILT_IN_EXTRACT_RETURN_ADDR:
7768 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7769 case BUILT_IN_EH_RETURN:
7770 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7771 CALL_EXPR_ARG (exp, 1));
7772 return const0_rtx;
7773 case BUILT_IN_EH_RETURN_DATA_REGNO:
7774 return expand_builtin_eh_return_data_regno (exp);
7775 case BUILT_IN_EXTEND_POINTER:
7776 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7777 case BUILT_IN_EH_POINTER:
7778 return expand_builtin_eh_pointer (exp);
7779 case BUILT_IN_EH_FILTER:
7780 return expand_builtin_eh_filter (exp);
7781 case BUILT_IN_EH_COPY_VALUES:
7782 return expand_builtin_eh_copy_values (exp);
7784 case BUILT_IN_VA_START:
7785 return expand_builtin_va_start (exp);
7786 case BUILT_IN_VA_END:
7787 return expand_builtin_va_end (exp);
7788 case BUILT_IN_VA_COPY:
7789 return expand_builtin_va_copy (exp);
7790 case BUILT_IN_EXPECT:
7791 return expand_builtin_expect (exp, target);
7792 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7793 return expand_builtin_expect_with_probability (exp, target);
7794 case BUILT_IN_ASSUME_ALIGNED:
7795 return expand_builtin_assume_aligned (exp, target);
7796 case BUILT_IN_PREFETCH:
7797 expand_builtin_prefetch (exp);
7798 return const0_rtx;
7800 case BUILT_IN_INIT_TRAMPOLINE:
7801 return expand_builtin_init_trampoline (exp, true);
7802 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7803 return expand_builtin_init_trampoline (exp, false);
7804 case BUILT_IN_ADJUST_TRAMPOLINE:
7805 return expand_builtin_adjust_trampoline (exp);
7807 case BUILT_IN_INIT_DESCRIPTOR:
7808 return expand_builtin_init_descriptor (exp);
7809 case BUILT_IN_ADJUST_DESCRIPTOR:
7810 return expand_builtin_adjust_descriptor (exp);
7812 case BUILT_IN_FORK:
7813 case BUILT_IN_EXECL:
7814 case BUILT_IN_EXECV:
7815 case BUILT_IN_EXECLP:
7816 case BUILT_IN_EXECLE:
7817 case BUILT_IN_EXECVP:
7818 case BUILT_IN_EXECVE:
7819 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7820 if (target)
7821 return target;
7822 break;
7824 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7825 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7826 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7827 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7828 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7829 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7830 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7831 if (target)
7832 return target;
7833 break;
7835 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7836 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7837 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7838 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7839 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7840 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7841 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7842 if (target)
7843 return target;
7844 break;
7846 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7847 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7848 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7849 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7850 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7851 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7852 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7853 if (target)
7854 return target;
7855 break;
7857 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7858 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7859 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7860 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7861 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7862 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7863 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7864 if (target)
7865 return target;
7866 break;
7868 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7869 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7870 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7871 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7872 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7873 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7874 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7875 if (target)
7876 return target;
7877 break;
7879 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7880 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7881 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7882 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7883 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7884 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7885 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7886 if (target)
7887 return target;
7888 break;
7890 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7891 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7892 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7893 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7894 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7895 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7896 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7897 if (target)
7898 return target;
7899 break;
7901 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7902 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7903 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7904 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7905 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7906 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7907 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7908 if (target)
7909 return target;
7910 break;
7912 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7913 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7914 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7915 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7916 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7917 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7918 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7919 if (target)
7920 return target;
7921 break;
7923 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7924 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7925 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7926 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7927 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7928 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7929 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7930 if (target)
7931 return target;
7932 break;
7934 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7935 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7936 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7937 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7938 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7939 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7940 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7941 if (target)
7942 return target;
7943 break;
7945 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7946 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7947 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7948 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7949 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7950 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7951 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7952 if (target)
7953 return target;
7954 break;
7956 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7957 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7958 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7959 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7960 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7961 if (mode == VOIDmode)
7962 mode = TYPE_MODE (boolean_type_node);
7963 if (!target || !register_operand (target, mode))
7964 target = gen_reg_rtx (mode);
7966 mode = get_builtin_sync_mode
7967 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7968 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7974 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7975 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7976 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7977 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7978 mode = get_builtin_sync_mode
7979 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7980 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7986 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7987 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7988 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7989 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7990 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7991 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7992 if (target)
7993 return target;
7994 break;
7996 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7997 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7998 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7999 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8000 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8002 expand_builtin_sync_lock_release (mode, exp);
8003 return const0_rtx;
8005 case BUILT_IN_SYNC_SYNCHRONIZE:
8006 expand_builtin_sync_synchronize ();
8007 return const0_rtx;
8009 case BUILT_IN_ATOMIC_EXCHANGE_1:
8010 case BUILT_IN_ATOMIC_EXCHANGE_2:
8011 case BUILT_IN_ATOMIC_EXCHANGE_4:
8012 case BUILT_IN_ATOMIC_EXCHANGE_8:
8013 case BUILT_IN_ATOMIC_EXCHANGE_16:
8014 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8015 target = expand_builtin_atomic_exchange (mode, exp, target);
8016 if (target)
8017 return target;
8018 break;
8020 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8021 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8022 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8023 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8024 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8026 unsigned int nargs, z;
8027 vec<tree, va_gc> *vec;
8029 mode =
8030 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8031 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8032 if (target)
8033 return target;
8035 /* If this is turned into an external library call, the weak parameter
8036 must be dropped to match the expected parameter list. */
8037 nargs = call_expr_nargs (exp);
8038 vec_alloc (vec, nargs - 1);
8039 for (z = 0; z < 3; z++)
8040 vec->quick_push (CALL_EXPR_ARG (exp, z));
8041 /* Skip the boolean weak parameter. */
8042 for (z = 4; z < 6; z++)
8043 vec->quick_push (CALL_EXPR_ARG (exp, z));
8044 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8045 break;
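/* Illustrative mapping: the six-argument builtin form

     __atomic_compare_exchange_n (ptr, &expected, desired, weak,
                                  success_order, failure_order)

   becomes the five-argument external routine

     __atomic_compare_exchange_N (ptr, &expected, desired,
                                  success_order, failure_order)

   i.e. the same arguments minus the boolean WEAK flag, which is what
   the copying loop above builds.  */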
8048 case BUILT_IN_ATOMIC_LOAD_1:
8049 case BUILT_IN_ATOMIC_LOAD_2:
8050 case BUILT_IN_ATOMIC_LOAD_4:
8051 case BUILT_IN_ATOMIC_LOAD_8:
8052 case BUILT_IN_ATOMIC_LOAD_16:
8053 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8054 target = expand_builtin_atomic_load (mode, exp, target);
8055 if (target)
8056 return target;
8057 break;
8059 case BUILT_IN_ATOMIC_STORE_1:
8060 case BUILT_IN_ATOMIC_STORE_2:
8061 case BUILT_IN_ATOMIC_STORE_4:
8062 case BUILT_IN_ATOMIC_STORE_8:
8063 case BUILT_IN_ATOMIC_STORE_16:
8064 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8065 target = expand_builtin_atomic_store (mode, exp);
8066 if (target)
8067 return const0_rtx;
8068 break;
8070 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8071 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8072 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8073 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8074 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8076 enum built_in_function lib;
8077 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8078 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8079 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8080 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8081 ignore, lib);
8082 if (target)
8083 return target;
8084 break;
8086 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8087 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8088 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8089 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8090 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8092 enum built_in_function lib;
8093 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8094 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8095 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8096 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8097 ignore, lib);
8098 if (target)
8099 return target;
8100 break;
8102 case BUILT_IN_ATOMIC_AND_FETCH_1:
8103 case BUILT_IN_ATOMIC_AND_FETCH_2:
8104 case BUILT_IN_ATOMIC_AND_FETCH_4:
8105 case BUILT_IN_ATOMIC_AND_FETCH_8:
8106 case BUILT_IN_ATOMIC_AND_FETCH_16:
8108 enum built_in_function lib;
8109 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8110 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8111 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8112 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8113 ignore, lib);
8114 if (target)
8115 return target;
8116 break;
8118 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8119 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8120 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8121 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8122 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8124 enum built_in_function lib;
8125 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8126 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8127 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8128 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8129 ignore, lib);
8130 if (target)
8131 return target;
8132 break;
8134 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8135 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8136 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8137 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8138 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8140 enum built_in_function lib;
8141 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8142 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8143 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8144 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8145 ignore, lib);
8146 if (target)
8147 return target;
8148 break;
8150 case BUILT_IN_ATOMIC_OR_FETCH_1:
8151 case BUILT_IN_ATOMIC_OR_FETCH_2:
8152 case BUILT_IN_ATOMIC_OR_FETCH_4:
8153 case BUILT_IN_ATOMIC_OR_FETCH_8:
8154 case BUILT_IN_ATOMIC_OR_FETCH_16:
8156 enum built_in_function lib;
8157 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8158 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8159 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8160 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8161 ignore, lib);
8162 if (target)
8163 return target;
8164 break;
8166 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8167 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8168 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8169 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8170 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8171 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8172 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8173 ignore, BUILT_IN_NONE);
8174 if (target)
8175 return target;
8176 break;
8178 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8179 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8180 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8181 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8182 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8183 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8184 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8185 ignore, BUILT_IN_NONE);
8186 if (target)
8187 return target;
8188 break;
8190 case BUILT_IN_ATOMIC_FETCH_AND_1:
8191 case BUILT_IN_ATOMIC_FETCH_AND_2:
8192 case BUILT_IN_ATOMIC_FETCH_AND_4:
8193 case BUILT_IN_ATOMIC_FETCH_AND_8:
8194 case BUILT_IN_ATOMIC_FETCH_AND_16:
8195 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8196 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8197 ignore, BUILT_IN_NONE);
8198 if (target)
8199 return target;
8200 break;
8202 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8203 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8204 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8205 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8206 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8207 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8208 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8209 ignore, BUILT_IN_NONE);
8210 if (target)
8211 return target;
8212 break;
8214 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8215 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8216 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8217 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8218 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8219 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8220 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8221 ignore, BUILT_IN_NONE);
8222 if (target)
8223 return target;
8224 break;
8226 case BUILT_IN_ATOMIC_FETCH_OR_1:
8227 case BUILT_IN_ATOMIC_FETCH_OR_2:
8228 case BUILT_IN_ATOMIC_FETCH_OR_4:
8229 case BUILT_IN_ATOMIC_FETCH_OR_8:
8230 case BUILT_IN_ATOMIC_FETCH_OR_16:
8231 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8232 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8233 ignore, BUILT_IN_NONE);
8234 if (target)
8235 return target;
8236 break;
8238 case BUILT_IN_ATOMIC_TEST_AND_SET:
8239 return expand_builtin_atomic_test_and_set (exp, target);
8241 case BUILT_IN_ATOMIC_CLEAR:
8242 return expand_builtin_atomic_clear (exp);
8244 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8245 return expand_builtin_atomic_always_lock_free (exp);
8247 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8248 target = expand_builtin_atomic_is_lock_free (exp);
8249 if (target)
8250 return target;
8251 break;
8253 case BUILT_IN_ATOMIC_THREAD_FENCE:
8254 expand_builtin_atomic_thread_fence (exp);
8255 return const0_rtx;
8257 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8258 expand_builtin_atomic_signal_fence (exp);
8259 return const0_rtx;
8261 case BUILT_IN_OBJECT_SIZE:
8262 return expand_builtin_object_size (exp);
8264 case BUILT_IN_MEMCPY_CHK:
8265 case BUILT_IN_MEMPCPY_CHK:
8266 case BUILT_IN_MEMMOVE_CHK:
8267 case BUILT_IN_MEMSET_CHK:
8268 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8269 if (target)
8270 return target;
8271 break;
8273 case BUILT_IN_STRCPY_CHK:
8274 case BUILT_IN_STPCPY_CHK:
8275 case BUILT_IN_STRNCPY_CHK:
8276 case BUILT_IN_STPNCPY_CHK:
8277 case BUILT_IN_STRCAT_CHK:
8278 case BUILT_IN_STRNCAT_CHK:
8279 case BUILT_IN_SNPRINTF_CHK:
8280 case BUILT_IN_VSNPRINTF_CHK:
8281 maybe_emit_chk_warning (exp, fcode);
8282 break;
8284 case BUILT_IN_SPRINTF_CHK:
8285 case BUILT_IN_VSPRINTF_CHK:
8286 maybe_emit_sprintf_chk_warning (exp, fcode);
8287 break;
8289 case BUILT_IN_FREE:
8290 if (warn_free_nonheap_object)
8291 maybe_emit_free_warning (exp);
8292 break;
8294 case BUILT_IN_THREAD_POINTER:
8295 return expand_builtin_thread_pointer (exp, target);
8297 case BUILT_IN_SET_THREAD_POINTER:
8298 expand_builtin_set_thread_pointer (exp);
8299 return const0_rtx;
8301 case BUILT_IN_ACC_ON_DEVICE:
8302 /* Fall back to a library call if we failed to expand the builtin
8303 when folding. */
8304 break;
8306 case BUILT_IN_GOACC_PARLEVEL_ID:
8307 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8308 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8310 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8311 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8313 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8314 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8315 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8316 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8317 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8318 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8319 return expand_speculation_safe_value (mode, exp, target, ignore);
8321 default: /* just do a library call, if unknown builtin */
8322 break;
8325 /* The switch statement above can drop through to cause the function
8326 to be called normally. */
8327 return expand_call (exp, target, ignore);
8330 /* Determine whether a tree node represents a call to a built-in
8331 function. If the tree T is a call to a built-in function with
8332 the right number of arguments of the appropriate types, return
8333 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8334 Otherwise the return value is END_BUILTINS. */
8336 enum built_in_function
8337 builtin_mathfn_code (const_tree t)
8339 const_tree fndecl, arg, parmlist;
8340 const_tree argtype, parmtype;
8341 const_call_expr_arg_iterator iter;
8343 if (TREE_CODE (t) != CALL_EXPR)
8344 return END_BUILTINS;
8346 fndecl = get_callee_fndecl (t);
8347 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8348 return END_BUILTINS;
8350 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8351 init_const_call_expr_arg_iterator (t, &iter);
8352 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8354 /* If a function doesn't take a variable number of arguments,
8355 the last element in the list will have type `void'. */
8356 parmtype = TREE_VALUE (parmlist);
8357 if (VOID_TYPE_P (parmtype))
8359 if (more_const_call_expr_args_p (&iter))
8360 return END_BUILTINS;
8361 return DECL_FUNCTION_CODE (fndecl);
8364 if (! more_const_call_expr_args_p (&iter))
8365 return END_BUILTINS;
8367 arg = next_const_call_expr_arg (&iter);
8368 argtype = TREE_TYPE (arg);
8370 if (SCALAR_FLOAT_TYPE_P (parmtype))
8372 if (! SCALAR_FLOAT_TYPE_P (argtype))
8373 return END_BUILTINS;
8375 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8377 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8378 return END_BUILTINS;
8380 else if (POINTER_TYPE_P (parmtype))
8382 if (! POINTER_TYPE_P (argtype))
8383 return END_BUILTINS;
8385 else if (INTEGRAL_TYPE_P (parmtype))
8387 if (! INTEGRAL_TYPE_P (argtype))
8388 return END_BUILTINS;
8390 else
8391 return END_BUILTINS;
8394 /* Variable-length argument list. */
8395 return DECL_FUNCTION_CODE (fndecl);
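/* For example (illustrative): for the expression sqrt (x) with double X
   this returns BUILT_IN_SQRT, while a mismatched call such as passing a
   pointer where a float is expected yields END_BUILTINS.  */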
8398 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8399 evaluate to a constant. */
8401 static tree
8402 fold_builtin_constant_p (tree arg)
8404 /* We return 1 for a numeric type that's known to be a constant
8405 value at compile-time or for an aggregate type that's a
8406 literal constant. */
8407 STRIP_NOPS (arg);
8409 /* If we know this is a constant, return the constant 1. */
8410 if (CONSTANT_CLASS_P (arg)
8411 || (TREE_CODE (arg) == CONSTRUCTOR
8412 && TREE_CONSTANT (arg)))
8413 return integer_one_node;
8414 if (TREE_CODE (arg) == ADDR_EXPR)
8416 tree op = TREE_OPERAND (arg, 0);
8417 if (TREE_CODE (op) == STRING_CST
8418 || (TREE_CODE (op) == ARRAY_REF
8419 && integer_zerop (TREE_OPERAND (op, 1))
8420 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8421 return integer_one_node;
8424 /* If this expression has side effects, show we don't know it to be a
8425 constant. Likewise if it's a pointer or aggregate type since in
8426 those cases we only want literals, since those are only optimized
8427 when generating RTL, not later.
8428 And finally, if we are compiling an initializer, not code, we
8429 need to return a definite result now; there's not going to be any
8430 more optimization done. */
8431 if (TREE_SIDE_EFFECTS (arg)
8432 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8433 || POINTER_TYPE_P (TREE_TYPE (arg))
8434 || cfun == 0
8435 || folding_initializer
8436 || force_folding_builtin_constant_p)
8437 return integer_zero_node;
8439 return NULL_TREE;
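/* Illustrative outcomes: __builtin_constant_p (3) folds to 1 here;
   __builtin_constant_p (x++) folds to 0 because of the side effect;
   and __builtin_constant_p (n) for a plain integer variable N is left
   unfolded (NULL_TREE) so later passes may still prove it constant.  */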
8442 /* Create builtin_expect or builtin_expect_with_probability
8443 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8444 The Fortran FE can also produce builtin_expect with PREDICTOR as a
8445 third argument; builtin_expect_with_probability instead uses its third
8446 argument as a PROBABILITY value. */
8448 static tree
8449 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8450 tree predictor, tree probability)
8452 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8454 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8455 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8456 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8457 ret_type = TREE_TYPE (TREE_TYPE (fn));
8458 pred_type = TREE_VALUE (arg_types);
8459 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8461 pred = fold_convert_loc (loc, pred_type, pred);
8462 expected = fold_convert_loc (loc, expected_type, expected);
8464 if (probability)
8465 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8466 else
8467 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8468 predictor);
8470 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8471 build_int_cst (ret_type, 0));
8474 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8475 NULL_TREE if no simplification is possible. */
8477 tree
8478 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8479 tree arg3)
8481 tree inner, fndecl, inner_arg0;
8482 enum tree_code code;
8484 /* Distribute the expected value over short-circuiting operators.
8485 See through the cast from truthvalue_type_node to long. */
8486 inner_arg0 = arg0;
8487 while (CONVERT_EXPR_P (inner_arg0)
8488 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8489 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8490 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8492 /* If this is a builtin_expect within a builtin_expect keep the
8493 inner one. See through a comparison against a constant. It
8494 might have been added to create a truthvalue. */
8495 inner = inner_arg0;
8497 if (COMPARISON_CLASS_P (inner)
8498 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8499 inner = TREE_OPERAND (inner, 0);
8501 if (TREE_CODE (inner) == CALL_EXPR
8502 && (fndecl = get_callee_fndecl (inner))
8503 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8504 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8505 return arg0;
8507 inner = inner_arg0;
8508 code = TREE_CODE (inner);
8509 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8511 tree op0 = TREE_OPERAND (inner, 0);
8512 tree op1 = TREE_OPERAND (inner, 1);
8513 arg1 = save_expr (arg1);
8515 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8516 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8517 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8519 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8522 /* If the argument isn't invariant then there's nothing else we can do. */
8523 if (!TREE_CONSTANT (inner_arg0))
8524 return NULL_TREE;
8526 /* If we expect that a comparison against the argument will fold to
8527 a constant, return the constant. In practice, this means a true
8528 constant or the address of a non-weak symbol. */
8529 inner = inner_arg0;
8530 STRIP_NOPS (inner);
8531 if (TREE_CODE (inner) == ADDR_EXPR)
8535 do inner = TREE_OPERAND (inner, 0);
8537 while (TREE_CODE (inner) == COMPONENT_REF
8538 || TREE_CODE (inner) == ARRAY_REF);
8539 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8540 return NULL_TREE;
8543 /* Otherwise, ARG0 already has the proper type for the return value. */
8544 return arg0;
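/* For example (illustrative): __builtin_expect (a && b, 1) is
   distributed by the TRUTH_ANDIF handling above into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries the prediction.  */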
8547 /* Fold a call to __builtin_classify_type with argument ARG. */
8549 static tree
8550 fold_builtin_classify_type (tree arg)
8552 if (arg == 0)
8553 return build_int_cst (integer_type_node, no_type_class);
8555 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8558 /* Fold a call to __builtin_strlen with argument ARG. */
8560 static tree
8561 fold_builtin_strlen (location_t loc, tree type, tree arg)
8563 if (!validate_arg (arg, POINTER_TYPE))
8564 return NULL_TREE;
8565 else
8567 c_strlen_data data;
8568 memset (&data, 0, sizeof (c_strlen_data));
8569 tree len = c_strlen (arg, 0, &data);
8571 if (len)
8572 return fold_convert_loc (loc, type, len);
8574 if (!data.decl)
8575 c_strlen (arg, 1, &data);
8577 if (data.decl)
8579 if (EXPR_HAS_LOCATION (arg))
8580 loc = EXPR_LOCATION (arg);
8581 else if (loc == UNKNOWN_LOCATION)
8582 loc = input_location;
8583 warn_string_no_nul (loc, "strlen", arg, data.decl);
8586 return NULL_TREE;
8590 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8592 static tree
8593 fold_builtin_inf (location_t loc, tree type, int warn)
8595 REAL_VALUE_TYPE real;
8597 /* __builtin_inff is intended to be usable to define INFINITY on all
8598 targets. If an infinity is not available, INFINITY expands "to a
8599 positive constant of type float that overflows at translation
8600 time", footnote "In this case, using INFINITY will violate the
8601 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8602 Thus we pedwarn to ensure this constraint violation is
8603 diagnosed. */
8604 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8605 pedwarn (loc, 0, "target format does not support infinity");
8607 real_inf (&real);
8608 return build_real (type, real);
8611 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8612 NULL_TREE if no simplification can be made. */
8614 static tree
8615 fold_builtin_sincos (location_t loc,
8616 tree arg0, tree arg1, tree arg2)
8618 tree type;
8619 tree fndecl, call = NULL_TREE;
8621 if (!validate_arg (arg0, REAL_TYPE)
8622 || !validate_arg (arg1, POINTER_TYPE)
8623 || !validate_arg (arg2, POINTER_TYPE))
8624 return NULL_TREE;
8626 type = TREE_TYPE (arg0);
8628 /* Canonicalize sincos to cexpi. */
8629 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8630 if (fn == END_BUILTINS)
8631 return NULL_TREE;
8633 /* Calculate the result when the argument is a constant. */
8634 if (TREE_CODE (arg0) == REAL_CST)
8636 tree complex_type = build_complex_type (type);
8637 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8639 if (!call)
8641 if (!targetm.libc_has_function (function_c99_math_complex)
8642 || !builtin_decl_implicit_p (fn))
8643 return NULL_TREE;
8644 fndecl = builtin_decl_explicit (fn);
8645 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8646 call = builtin_save_expr (call);
8649 tree ptype = build_pointer_type (type);
8650 arg1 = fold_convert (ptype, arg1);
8651 arg2 = fold_convert (ptype, arg2);
8652 return build2 (COMPOUND_EXPR, void_type_node,
8653 build2 (MODIFY_EXPR, void_type_node,
8654 build_fold_indirect_ref_loc (loc, arg1),
8655 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8656 build2 (MODIFY_EXPR, void_type_node,
8657 build_fold_indirect_ref_loc (loc, arg2),
8658 fold_build1_loc (loc, REALPART_EXPR, type, call)));
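/* Illustrative result (assuming the C99 complex function is usable):
   sincos (x, &s, &c) is rewritten as roughly

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   since cexpi (x) computes cos (x) + i*sin (x).  */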
8661 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8662 Return NULL_TREE if no simplification can be made. */
8664 static tree
8665 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8667 if (!validate_arg (arg1, POINTER_TYPE)
8668 || !validate_arg (arg2, POINTER_TYPE)
8669 || !validate_arg (len, INTEGER_TYPE))
8670 return NULL_TREE;
8672 /* If the LEN parameter is zero, return zero. */
8673 if (integer_zerop (len))
8674 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8675 arg1, arg2);
8677 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8678 if (operand_equal_p (arg1, arg2, 0))
8679 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8681 /* If len parameter is one, return an expression corresponding to
8682 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8683 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8685 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8686 tree cst_uchar_ptr_node
8687 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8689 tree ind1
8690 = fold_convert_loc (loc, integer_type_node,
8691 build1 (INDIRECT_REF, cst_uchar_node,
8692 fold_convert_loc (loc,
8693 cst_uchar_ptr_node,
8694 arg1)));
8695 tree ind2
8696 = fold_convert_loc (loc, integer_type_node,
8697 build1 (INDIRECT_REF, cst_uchar_node,
8698 fold_convert_loc (loc,
8699 cst_uchar_ptr_node,
8700 arg2)));
8701 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8704 return NULL_TREE;
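/* For example (illustrative): memcmp (p, q, 1) folds to

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   and memcmp (p, p, n) folds to 0, with N still evaluated for any side
   effects.  */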
8707 /* Fold a call to builtin isascii with argument ARG. */
8709 static tree
8710 fold_builtin_isascii (location_t loc, tree arg)
8712 if (!validate_arg (arg, INTEGER_TYPE))
8713 return NULL_TREE;
8714 else
8716 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8717 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8718 build_int_cst (integer_type_node,
8719 ~ (unsigned HOST_WIDE_INT) 0x7f));
8720 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8721 arg, integer_zero_node);
8725 /* Fold a call to builtin toascii with argument ARG. */
8727 static tree
8728 fold_builtin_toascii (location_t loc, tree arg)
8730 if (!validate_arg (arg, INTEGER_TYPE))
8731 return NULL_TREE;
8733 /* Transform toascii(c) -> (c & 0x7f). */
8734 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8735 build_int_cst (integer_type_node, 0x7f));
8738 /* Fold a call to builtin isdigit with argument ARG. */
8740 static tree
8741 fold_builtin_isdigit (location_t loc, tree arg)
8743 if (!validate_arg (arg, INTEGER_TYPE))
8744 return NULL_TREE;
8745 else
8747 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8748 /* According to the C standard, isdigit is unaffected by locale.
8749 However, it definitely is affected by the target character set. */
8750 unsigned HOST_WIDE_INT target_digit0
8751 = lang_hooks.to_target_charset ('0');
8753 if (target_digit0 == 0)
8754 return NULL_TREE;
8756 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8757 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8758 build_int_cst (unsigned_type_node, target_digit0));
8759 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8760 build_int_cst (unsigned_type_node, 9));
8764 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8766 static tree
8767 fold_builtin_fabs (location_t loc, tree arg, tree type)
8769 if (!validate_arg (arg, REAL_TYPE))
8770 return NULL_TREE;
8772 arg = fold_convert_loc (loc, type, arg);
8773 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8776 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8778 static tree
8779 fold_builtin_abs (location_t loc, tree arg, tree type)
8781 if (!validate_arg (arg, INTEGER_TYPE))
8782 return NULL_TREE;
8784 arg = fold_convert_loc (loc, type, arg);
8785 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8788 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8790 static tree
8791 fold_builtin_carg (location_t loc, tree arg, tree type)
8793 if (validate_arg (arg, COMPLEX_TYPE)
8794 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8796 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8798 if (atan2_fn)
8800 tree new_arg = builtin_save_expr (arg);
8801 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8802 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8803 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8807 return NULL_TREE;
8810 /* Fold a call to builtin frexp, we can assume the base is 2. */
8812 static tree
8813 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8815 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8816 return NULL_TREE;
8818 STRIP_NOPS (arg0);
8820 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8821 return NULL_TREE;
8823 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8825 /* Proceed if a valid pointer type was passed in. */
8826 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8828 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8829 tree frac, exp;
8831 switch (value->cl)
8833 case rvc_zero:
8834 /* For +-0, return (*exp = 0, +-0). */
8835 exp = integer_zero_node;
8836 frac = arg0;
8837 break;
8838 case rvc_nan:
8839 case rvc_inf:
8840 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8841 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8842 case rvc_normal:
8844 /* Since the frexp function always expects base 2, and in
8845 GCC normalized significands are already in the range
8846 [0.5, 1.0), we have exactly what frexp wants. */
8847 REAL_VALUE_TYPE frac_rvt = *value;
8848 SET_REAL_EXP (&frac_rvt, 0);
8849 frac = build_real (rettype, frac_rvt);
8850 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8852 break;
8853 default:
8854 gcc_unreachable ();
8857 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8858 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8859 TREE_SIDE_EFFECTS (arg1) = 1;
8860 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8863 return NULL_TREE;
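/* Worked example (illustrative): for frexp (8.0, &e), 8.0 is
   0.5 * 2^4 with the significand normalized into [0.5, 1.0), so the
   folded result is (*&e = 4, 0.5).  */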
8866 /* Fold a call to builtin modf. */
8868 static tree
8869 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8871 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8872 return NULL_TREE;
8874 STRIP_NOPS (arg0);
8876 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8877 return NULL_TREE;
8879 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8881 /* Proceed if a valid pointer type was passed in. */
8882 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8884 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8885 REAL_VALUE_TYPE trunc, frac;
8887 switch (value->cl)
8889 case rvc_nan:
8890 case rvc_zero:
8891 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8892 trunc = frac = *value;
8893 break;
8894 case rvc_inf:
8895 /* For +-Inf, return (*arg1 = arg0, +-0). */
8896 frac = dconst0;
8897 frac.sign = value->sign;
8898 trunc = *value;
8899 break;
8900 case rvc_normal:
8901 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8902 real_trunc (&trunc, VOIDmode, value);
8903 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8904 /* If the original number was negative and already
8905 integral, then the fractional part is -0.0. */
8906 if (value->sign && frac.cl == rvc_zero)
8907 frac.sign = value->sign;
8908 break;
8911 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8912 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8913 build_real (rettype, trunc));
8914 TREE_SIDE_EFFECTS (arg1) = 1;
8915 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8916 build_real (rettype, frac));
8919 return NULL_TREE;
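/* Editorial illustration (added; not part of the original source), with
   a hypothetical double IP:

     modf (3.25, &ip) -> (ip = 3.0, 0.25)
     modf (-2.0, &ip) -> (ip = -2.0, -0.0)

   the second case showing the sign being copied onto the zero
   fractional part of a negative, already-integral argument.  */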
8922 /* Given a location LOC, an interclass builtin function decl FNDECL
8923 and its single argument ARG, return a folded expression computing
8924 the same, or NULL_TREE if we either couldn't or didn't want to fold
8925 (the latter happens if there's an RTL instruction available). */
8927 static tree
8928 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8930 machine_mode mode;
8932 if (!validate_arg (arg, REAL_TYPE))
8933 return NULL_TREE;
8935 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8936 return NULL_TREE;
8938 mode = TYPE_MODE (TREE_TYPE (arg));
8940 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8942 /* If there is no optab, try generic code. */
8943 switch (DECL_FUNCTION_CODE (fndecl))
8945 tree result;
8947 CASE_FLT_FN (BUILT_IN_ISINF):
8949 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8950 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8951 tree type = TREE_TYPE (arg);
8952 REAL_VALUE_TYPE r;
8953 char buf[128];
8955 if (is_ibm_extended)
8957 /* NaN and Inf are encoded in the high-order double value
8958 only. The low-order value is not significant. */
8959 type = double_type_node;
8960 mode = DFmode;
8961 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8963 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8964 real_from_string (&r, buf);
8965 result = build_call_expr (isgr_fn, 2,
8966 fold_build1_loc (loc, ABS_EXPR, type, arg),
8967 build_real (type, r));
8968 return result;
8970 CASE_FLT_FN (BUILT_IN_FINITE):
8971 case BUILT_IN_ISFINITE:
8973 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8974 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8975 tree type = TREE_TYPE (arg);
8976 REAL_VALUE_TYPE r;
8977 char buf[128];
8979 if (is_ibm_extended)
8981 /* NaN and Inf are encoded in the high-order double value
8982 only. The low-order value is not significant. */
8983 type = double_type_node;
8984 mode = DFmode;
8985 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8987 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8988 real_from_string (&r, buf);
8989 result = build_call_expr (isle_fn, 2,
8990 fold_build1_loc (loc, ABS_EXPR, type, arg),
8991 build_real (type, r));
8992 /*result = fold_build2_loc (loc, UNGT_EXPR,
8993 TREE_TYPE (TREE_TYPE (fndecl)),
8994 fold_build1_loc (loc, ABS_EXPR, type, arg),
8995 build_real (type, r));
8996 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8997 TREE_TYPE (TREE_TYPE (fndecl)),
8998 result);*/
8999 return result;
9001 case BUILT_IN_ISNORMAL:
9003 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9004 islessequal(fabs(x),DBL_MAX). */
9005 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9006 tree type = TREE_TYPE (arg);
9007 tree orig_arg, max_exp, min_exp;
9008 machine_mode orig_mode = mode;
9009 REAL_VALUE_TYPE rmax, rmin;
9010 char buf[128];
9012 orig_arg = arg = builtin_save_expr (arg);
9013 if (is_ibm_extended)
9015 /* Use double to test the normal range of IBM extended
9016 precision. Emin for IBM extended precision is
9017 different from emin for IEEE double, being 53 higher
9018 since the low double exponent is at least 53 lower
9019 than the high double exponent. */
9020 type = double_type_node;
9021 mode = DFmode;
9022 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9024 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9026 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9027 real_from_string (&rmax, buf);
9028 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9029 real_from_string (&rmin, buf);
9030 max_exp = build_real (type, rmax);
9031 min_exp = build_real (type, rmin);
9033 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9034 if (is_ibm_extended)
9036 /* Testing the high end of the range is done just using
9037 the high double, using the same test as isfinite().
9038 For the subnormal end of the range we first test the
9039 high double, then if its magnitude is equal to the
9040 limit of 0x1p-969, we test whether the low double is
9041 non-zero and opposite sign to the high double. */
9042 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9043 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9044 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9045 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9046 arg, min_exp);
9047 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9048 complex_double_type_node, orig_arg);
9049 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9050 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9051 tree zero = build_real (type, dconst0);
9052 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9053 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9054 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9055 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9056 fold_build3 (COND_EXPR,
9057 integer_type_node,
9058 hilt, logt, lolt));
9059 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9060 eq_min, ok_lo);
9061 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9062 gt_min, eq_min);
9064 else
9066 tree const isge_fn
9067 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9068 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9070 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9071 max_exp, min_exp);
9072 return result;
9074 default:
9075 break;
9078 return NULL_TREE;
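/* Editorial illustration (added; not part of the original source): for
   plain IEEE double the generic expansions above amount to

     isinf (x)    -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> islessequal (fabs (x), DBL_MAX)
                     & isgreaterequal (fabs (x), DBL_MIN)

   where DBL_MAX and DBL_MIN stand for the mode's largest finite and
   smallest normal values.  The unordered comparisons are false on NaN,
   so no explicit NaN test is needed.  */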
9081 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9082 ARG is the argument for the call. */
9084 static tree
9085 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9089 if (!validate_arg (arg, REAL_TYPE))
9090 return NULL_TREE;
9092 switch (builtin_index)
9094 case BUILT_IN_ISINF:
9095 if (!HONOR_INFINITIES (arg))
9096 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9098 return NULL_TREE;
9100 case BUILT_IN_ISINF_SIGN:
9102 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9103 /* In a boolean context, GCC will fold the inner COND_EXPR to
9104 1. So e.g. "if (isinf_sign(x))" would be folded to just
9105 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9106 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9107 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9108 tree tmp = NULL_TREE;
9110 arg = builtin_save_expr (arg);
9112 if (signbit_fn && isinf_fn)
9114 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9115 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9117 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9118 signbit_call, integer_zero_node);
9119 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9120 isinf_call, integer_zero_node);
9122 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9123 integer_minus_one_node, integer_one_node);
9124 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9125 isinf_call, tmp,
9126 integer_zero_node);
9129 return tmp;
9132 case BUILT_IN_ISFINITE:
9133 if (!HONOR_NANS (arg)
9134 && !HONOR_INFINITIES (arg))
9135 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9137 return NULL_TREE;
9139 case BUILT_IN_ISNAN:
9140 if (!HONOR_NANS (arg))
9141 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9144 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9145 if (is_ibm_extended)
9147 /* NaN and Inf are encoded in the high-order double value
9148 only. The low-order value is not significant. */
9149 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9152 arg = builtin_save_expr (arg);
9153 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9155 default:
9156 gcc_unreachable ();
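/* Editorial illustration (added; not part of the original source): when
   NaNs are honored, the ISNAN case above reduces to a self-comparison,

     isnan (x) -> __builtin_isunordered (x, x)

   while under -ffinite-math-only isnan and isinf fold straight to 0,
   still evaluating X for side effects.  */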
9160 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9161 This builtin will generate code to return the appropriate floating
9162 point classification depending on the value of the floating point
9163 number passed in. The possible return values must be supplied as
9164 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9165 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9166 one floating point argument which is "type generic". */
9168 static tree
9169 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9171 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9172 arg, type, res, tmp;
9173 machine_mode mode;
9174 REAL_VALUE_TYPE r;
9175 char buf[128];
9177 /* Verify the required arguments in the original call. */
9178 if (nargs != 6
9179 || !validate_arg (args[0], INTEGER_TYPE)
9180 || !validate_arg (args[1], INTEGER_TYPE)
9181 || !validate_arg (args[2], INTEGER_TYPE)
9182 || !validate_arg (args[3], INTEGER_TYPE)
9183 || !validate_arg (args[4], INTEGER_TYPE)
9184 || !validate_arg (args[5], REAL_TYPE))
9185 return NULL_TREE;
9187 fp_nan = args[0];
9188 fp_infinite = args[1];
9189 fp_normal = args[2];
9190 fp_subnormal = args[3];
9191 fp_zero = args[4];
9192 arg = args[5];
9193 type = TREE_TYPE (arg);
9194 mode = TYPE_MODE (type);
9195 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9197 /* fpclassify(x) ->
9198 isnan(x) ? FP_NAN :
9199 (fabs(x) == Inf ? FP_INFINITE :
9200 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9201 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9203 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9204 build_real (type, dconst0));
9205 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9206 tmp, fp_zero, fp_subnormal);
9208 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9209 real_from_string (&r, buf);
9210 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9211 arg, build_real (type, r));
9212 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9214 if (HONOR_INFINITIES (mode))
9216 real_inf (&r);
9217 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9218 build_real (type, r));
9219 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9220 fp_infinite, res);
9223 if (HONOR_NANS (mode))
9225 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9226 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9229 return res;
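/* Editorial illustration (added; not part of the original source): with
   y = fabs (x) saved once, the COND_EXPR chain built above reads

     fpclassify (nan, inf, norm, sub, zero, x)
       -> __builtin_isunordered (x, x) ? nan
        : y == +Inf                    ? inf
        : y >= DBL_MIN                 ? norm
        : y == 0.0                     ? zero
        :                                sub

   where DBL_MIN stands for the smallest normal value of X's mode.  */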
9232 /* Fold a call to an unordered comparison function such as
9233 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9234 being called and ARG0 and ARG1 are the arguments for the call.
9235 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9236 the opposite of the desired result. UNORDERED_CODE is used
9237 for modes that can hold NaNs and ORDERED_CODE is used for
9238 the rest. */
9240 static tree
9241 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9242 enum tree_code unordered_code,
9243 enum tree_code ordered_code)
9245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9246 enum tree_code code;
9247 tree type0, type1;
9248 enum tree_code code0, code1;
9249 tree cmp_type = NULL_TREE;
9251 type0 = TREE_TYPE (arg0);
9252 type1 = TREE_TYPE (arg1);
9254 code0 = TREE_CODE (type0);
9255 code1 = TREE_CODE (type1);
9257 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9258 /* Choose the wider of two real types. */
9259 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9260 ? type0 : type1;
9261 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9262 cmp_type = type0;
9263 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9264 cmp_type = type1;
9266 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9267 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9269 if (unordered_code == UNORDERED_EXPR)
9271 if (!HONOR_NANS (arg0))
9272 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9273 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9276 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9277 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9278 fold_build2_loc (loc, code, type, arg0, arg1));
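/* Editorial illustration (added; not part of the original source): each
   macro is folded to the negation of its inverse comparison, e.g.

     isgreater (x, y)   -> !(x UNLE y)   when NaNs are honored
     isgreater (x, y)   -> !(x <= y)     otherwise
     isunordered (x, y) -> x UNORD y, or 0 under -ffinite-math-only

   UNLE is "unordered or less-equal", so its negation is true exactly
   when both operands are ordered and x > y.  */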
9281 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9282 arithmetic if it can never overflow, or into internal functions that
9283 return both the result of the arithmetic and an overflow flag in
9284 a complex integer result, or into some other check for overflow.
9285 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9286 checking part of that. */
9288 static tree
9289 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9290 tree arg0, tree arg1, tree arg2)
9292 enum internal_fn ifn = IFN_LAST;
9293 /* The code of the expression corresponding to the type-generic
9294 built-in, or ERROR_MARK for the type-specific ones. */
9295 enum tree_code opcode = ERROR_MARK;
9296 bool ovf_only = false;
9298 switch (fcode)
9300 case BUILT_IN_ADD_OVERFLOW_P:
9301 ovf_only = true;
9302 /* FALLTHRU */
9303 case BUILT_IN_ADD_OVERFLOW:
9304 opcode = PLUS_EXPR;
9305 /* FALLTHRU */
9306 case BUILT_IN_SADD_OVERFLOW:
9307 case BUILT_IN_SADDL_OVERFLOW:
9308 case BUILT_IN_SADDLL_OVERFLOW:
9309 case BUILT_IN_UADD_OVERFLOW:
9310 case BUILT_IN_UADDL_OVERFLOW:
9311 case BUILT_IN_UADDLL_OVERFLOW:
9312 ifn = IFN_ADD_OVERFLOW;
9313 break;
9314 case BUILT_IN_SUB_OVERFLOW_P:
9315 ovf_only = true;
9316 /* FALLTHRU */
9317 case BUILT_IN_SUB_OVERFLOW:
9318 opcode = MINUS_EXPR;
9319 /* FALLTHRU */
9320 case BUILT_IN_SSUB_OVERFLOW:
9321 case BUILT_IN_SSUBL_OVERFLOW:
9322 case BUILT_IN_SSUBLL_OVERFLOW:
9323 case BUILT_IN_USUB_OVERFLOW:
9324 case BUILT_IN_USUBL_OVERFLOW:
9325 case BUILT_IN_USUBLL_OVERFLOW:
9326 ifn = IFN_SUB_OVERFLOW;
9327 break;
9328 case BUILT_IN_MUL_OVERFLOW_P:
9329 ovf_only = true;
9330 /* FALLTHRU */
9331 case BUILT_IN_MUL_OVERFLOW:
9332 opcode = MULT_EXPR;
9333 /* FALLTHRU */
9334 case BUILT_IN_SMUL_OVERFLOW:
9335 case BUILT_IN_SMULL_OVERFLOW:
9336 case BUILT_IN_SMULLL_OVERFLOW:
9337 case BUILT_IN_UMUL_OVERFLOW:
9338 case BUILT_IN_UMULL_OVERFLOW:
9339 case BUILT_IN_UMULLL_OVERFLOW:
9340 ifn = IFN_MUL_OVERFLOW;
9341 break;
9342 default:
9343 gcc_unreachable ();
9346 /* For the "generic" overloads, the first two arguments can have different
9347 types and the last argument determines the target type to use to check
9348 for overflow. The arguments of the other overloads all have the same
9349 type. */
9350 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9352 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9353 arguments are constant, attempt to fold the built-in call into a constant
9354 expression indicating whether or not it detected an overflow. */
9355 if (ovf_only
9356 && TREE_CODE (arg0) == INTEGER_CST
9357 && TREE_CODE (arg1) == INTEGER_CST)
9358 /* Perform the computation in the target type and check for overflow. */
9359 return omit_one_operand_loc (loc, boolean_type_node,
9360 arith_overflowed_p (opcode, type, arg0, arg1)
9361 ? boolean_true_node : boolean_false_node,
9362 arg2);
9364 tree ctype = build_complex_type (type);
9365 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9366 2, arg0, arg1);
9367 tree tgt = save_expr (call);
9368 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9369 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9370 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9372 if (ovf_only)
9373 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9375 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9376 tree store
9377 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9378 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
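/* Editorial illustration (added; not part of the original source): the
   type-generic form becomes an internal-function call returning a
   complex integer whose real part is the result and whose imaginary
   part is the overflow flag:

     __builtin_add_overflow (a, b, &r)
       -> c = .ADD_OVERFLOW (a, b);
          r = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>

   while __builtin_add_overflow_p with two constant operands folds
   directly to true or false.  */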
9381 /* Fold a call to __builtin_FILE to a constant string. */
9383 static inline tree
9384 fold_builtin_FILE (location_t loc)
9386 if (const char *fname = LOCATION_FILE (loc))
9388 /* The documentation says this builtin is equivalent to the preprocessor
9389 __FILE__ macro, so it appears appropriate to use the same file prefix
9390 mappings. */
9391 fname = remap_macro_filename (fname);
9392 return build_string_literal (strlen (fname) + 1, fname);
9395 return build_string_literal (1, "");
9398 /* Fold a call to __builtin_FUNCTION to a constant string. */
9400 static inline tree
9401 fold_builtin_FUNCTION ()
9403 const char *name = "";
9405 if (current_function_decl)
9406 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9408 return build_string_literal (strlen (name) + 1, name);
9411 /* Fold a call to __builtin_LINE to an integer constant. */
9413 static inline tree
9414 fold_builtin_LINE (location_t loc, tree type)
9416 return build_int_cst (type, LOCATION_LINE (loc));
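/* Editorial illustration (added; not part of the original source): a
   call on line 42 of foo.c inside a function bar folds to

     __builtin_FILE ()     -> "foo.c"   (after file-prefix remapping)
     __builtin_FUNCTION () -> "bar"
     __builtin_LINE ()     -> 42

   using the location of the call itself, which is what makes these
   builtins usable in C++ default arguments.  */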
9419 /* Fold a call to built-in function FNDECL with 0 arguments.
9420 This function returns NULL_TREE if no simplification was possible. */
9422 static tree
9423 fold_builtin_0 (location_t loc, tree fndecl)
9425 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9426 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9427 switch (fcode)
9429 case BUILT_IN_FILE:
9430 return fold_builtin_FILE (loc);
9432 case BUILT_IN_FUNCTION:
9433 return fold_builtin_FUNCTION ();
9435 case BUILT_IN_LINE:
9436 return fold_builtin_LINE (loc, type);
9438 CASE_FLT_FN (BUILT_IN_INF):
9439 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9440 case BUILT_IN_INFD32:
9441 case BUILT_IN_INFD64:
9442 case BUILT_IN_INFD128:
9443 return fold_builtin_inf (loc, type, true);
9445 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9446 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9447 return fold_builtin_inf (loc, type, false);
9449 case BUILT_IN_CLASSIFY_TYPE:
9450 return fold_builtin_classify_type (NULL_TREE);
9452 default:
9453 break;
9455 return NULL_TREE;
9458 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9459 This function returns NULL_TREE if no simplification was possible. */
9461 static tree
9462 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9464 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9465 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9467 if (TREE_CODE (arg0) == ERROR_MARK)
9468 return NULL_TREE;
9470 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9471 return ret;
9473 switch (fcode)
9475 case BUILT_IN_CONSTANT_P:
9477 tree val = fold_builtin_constant_p (arg0);
9479 /* Gimplification will pull the CALL_EXPR for the builtin out of
9480 an if condition. When not optimizing, we'll not CSE it back.
9481 To avoid regressions such as link errors, return false now. */
9482 if (!val && !optimize)
9483 val = integer_zero_node;
9485 return val;
9488 case BUILT_IN_CLASSIFY_TYPE:
9489 return fold_builtin_classify_type (arg0);
9491 case BUILT_IN_STRLEN:
9492 return fold_builtin_strlen (loc, type, arg0);
9494 CASE_FLT_FN (BUILT_IN_FABS):
9495 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9496 case BUILT_IN_FABSD32:
9497 case BUILT_IN_FABSD64:
9498 case BUILT_IN_FABSD128:
9499 return fold_builtin_fabs (loc, arg0, type);
9501 case BUILT_IN_ABS:
9502 case BUILT_IN_LABS:
9503 case BUILT_IN_LLABS:
9504 case BUILT_IN_IMAXABS:
9505 return fold_builtin_abs (loc, arg0, type);
9507 CASE_FLT_FN (BUILT_IN_CONJ):
9508 if (validate_arg (arg0, COMPLEX_TYPE)
9509 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9510 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9511 break;
9513 CASE_FLT_FN (BUILT_IN_CREAL):
9514 if (validate_arg (arg0, COMPLEX_TYPE)
9515 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9516 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9517 break;
9519 CASE_FLT_FN (BUILT_IN_CIMAG):
9520 if (validate_arg (arg0, COMPLEX_TYPE)
9521 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9522 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9523 break;
9525 CASE_FLT_FN (BUILT_IN_CARG):
9526 return fold_builtin_carg (loc, arg0, type);
9528 case BUILT_IN_ISASCII:
9529 return fold_builtin_isascii (loc, arg0);
9531 case BUILT_IN_TOASCII:
9532 return fold_builtin_toascii (loc, arg0);
9534 case BUILT_IN_ISDIGIT:
9535 return fold_builtin_isdigit (loc, arg0);
9537 CASE_FLT_FN (BUILT_IN_FINITE):
9538 case BUILT_IN_FINITED32:
9539 case BUILT_IN_FINITED64:
9540 case BUILT_IN_FINITED128:
9541 case BUILT_IN_ISFINITE:
9543 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9544 if (ret)
9545 return ret;
9546 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9549 CASE_FLT_FN (BUILT_IN_ISINF):
9550 case BUILT_IN_ISINFD32:
9551 case BUILT_IN_ISINFD64:
9552 case BUILT_IN_ISINFD128:
9554 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9555 if (ret)
9556 return ret;
9557 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9560 case BUILT_IN_ISNORMAL:
9561 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9563 case BUILT_IN_ISINF_SIGN:
9564 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9566 CASE_FLT_FN (BUILT_IN_ISNAN):
9567 case BUILT_IN_ISNAND32:
9568 case BUILT_IN_ISNAND64:
9569 case BUILT_IN_ISNAND128:
9570 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9572 case BUILT_IN_FREE:
9573 if (integer_zerop (arg0))
9574 return build_empty_stmt (loc);
9575 break;
9577 default:
9578 break;
9581 return NULL_TREE;
9585 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9586 This function returns NULL_TREE if no simplification was possible. */
9588 static tree
9589 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9591 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9594 if (TREE_CODE (arg0) == ERROR_MARK
9595 || TREE_CODE (arg1) == ERROR_MARK)
9596 return NULL_TREE;
9598 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9599 return ret;
9601 switch (fcode)
9603 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9604 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9605 if (validate_arg (arg0, REAL_TYPE)
9606 && validate_arg (arg1, POINTER_TYPE))
9607 return do_mpfr_lgamma_r (arg0, arg1, type);
9608 break;
9610 CASE_FLT_FN (BUILT_IN_FREXP):
9611 return fold_builtin_frexp (loc, arg0, arg1, type);
9613 CASE_FLT_FN (BUILT_IN_MODF):
9614 return fold_builtin_modf (loc, arg0, arg1, type);
9616 case BUILT_IN_STRSPN:
9617 return fold_builtin_strspn (loc, arg0, arg1);
9619 case BUILT_IN_STRCSPN:
9620 return fold_builtin_strcspn (loc, arg0, arg1);
9622 case BUILT_IN_STRPBRK:
9623 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9625 case BUILT_IN_EXPECT:
9626 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9628 case BUILT_IN_ISGREATER:
9629 return fold_builtin_unordered_cmp (loc, fndecl,
9630 arg0, arg1, UNLE_EXPR, LE_EXPR);
9631 case BUILT_IN_ISGREATEREQUAL:
9632 return fold_builtin_unordered_cmp (loc, fndecl,
9633 arg0, arg1, UNLT_EXPR, LT_EXPR);
9634 case BUILT_IN_ISLESS:
9635 return fold_builtin_unordered_cmp (loc, fndecl,
9636 arg0, arg1, UNGE_EXPR, GE_EXPR);
9637 case BUILT_IN_ISLESSEQUAL:
9638 return fold_builtin_unordered_cmp (loc, fndecl,
9639 arg0, arg1, UNGT_EXPR, GT_EXPR);
9640 case BUILT_IN_ISLESSGREATER:
9641 return fold_builtin_unordered_cmp (loc, fndecl,
9642 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9643 case BUILT_IN_ISUNORDERED:
9644 return fold_builtin_unordered_cmp (loc, fndecl,
9645 arg0, arg1, UNORDERED_EXPR,
9646 NOP_EXPR);
9648 /* We do the folding for va_start in the expander. */
9649 case BUILT_IN_VA_START:
9650 break;
9652 case BUILT_IN_OBJECT_SIZE:
9653 return fold_builtin_object_size (arg0, arg1);
9655 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9656 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9658 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9659 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9661 default:
9662 break;
9664 return NULL_TREE;
9667 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9668 and ARG2.
9669 This function returns NULL_TREE if no simplification was possible. */
9671 static tree
9672 fold_builtin_3 (location_t loc, tree fndecl,
9673 tree arg0, tree arg1, tree arg2)
9675 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9676 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9678 if (TREE_CODE (arg0) == ERROR_MARK
9679 || TREE_CODE (arg1) == ERROR_MARK
9680 || TREE_CODE (arg2) == ERROR_MARK)
9681 return NULL_TREE;
9683 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9684 arg0, arg1, arg2))
9685 return ret;
9687 switch (fcode)
9690 CASE_FLT_FN (BUILT_IN_SINCOS):
9691 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9693 CASE_FLT_FN (BUILT_IN_REMQUO):
9694 if (validate_arg (arg0, REAL_TYPE)
9695 && validate_arg (arg1, REAL_TYPE)
9696 && validate_arg (arg2, POINTER_TYPE))
9697 return do_mpfr_remquo (arg0, arg1, arg2);
9698 break;
9700 case BUILT_IN_MEMCMP:
9701 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9703 case BUILT_IN_EXPECT:
9704 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9706 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9707 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9709 case BUILT_IN_ADD_OVERFLOW:
9710 case BUILT_IN_SUB_OVERFLOW:
9711 case BUILT_IN_MUL_OVERFLOW:
9712 case BUILT_IN_ADD_OVERFLOW_P:
9713 case BUILT_IN_SUB_OVERFLOW_P:
9714 case BUILT_IN_MUL_OVERFLOW_P:
9715 case BUILT_IN_SADD_OVERFLOW:
9716 case BUILT_IN_SADDL_OVERFLOW:
9717 case BUILT_IN_SADDLL_OVERFLOW:
9718 case BUILT_IN_SSUB_OVERFLOW:
9719 case BUILT_IN_SSUBL_OVERFLOW:
9720 case BUILT_IN_SSUBLL_OVERFLOW:
9721 case BUILT_IN_SMUL_OVERFLOW:
9722 case BUILT_IN_SMULL_OVERFLOW:
9723 case BUILT_IN_SMULLL_OVERFLOW:
9724 case BUILT_IN_UADD_OVERFLOW:
9725 case BUILT_IN_UADDL_OVERFLOW:
9726 case BUILT_IN_UADDLL_OVERFLOW:
9727 case BUILT_IN_USUB_OVERFLOW:
9728 case BUILT_IN_USUBL_OVERFLOW:
9729 case BUILT_IN_USUBLL_OVERFLOW:
9730 case BUILT_IN_UMUL_OVERFLOW:
9731 case BUILT_IN_UMULL_OVERFLOW:
9732 case BUILT_IN_UMULLL_OVERFLOW:
9733 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9735 default:
9736 break;
9738 return NULL_TREE;
9741 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9742 arguments. IGNORE is true if the result of the
9743 function call is ignored. This function returns NULL_TREE if no
9744 simplification was possible. */
9746 tree
9747 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9749 tree ret = NULL_TREE;
9751 switch (nargs)
9753 case 0:
9754 ret = fold_builtin_0 (loc, fndecl);
9755 break;
9756 case 1:
9757 ret = fold_builtin_1 (loc, fndecl, args[0]);
9758 break;
9759 case 2:
9760 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9761 break;
9762 case 3:
9763 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9764 break;
9765 default:
9766 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9767 break;
9769 if (ret)
9771 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9772 SET_EXPR_LOCATION (ret, loc);
9773 return ret;
9775 return NULL_TREE;
9778 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9779 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9780 of arguments in ARGS to be omitted. OLDNARGS is the number of
9781 elements in ARGS. */
9783 static tree
9784 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9785 int skip, tree fndecl, int n, va_list newargs)
9787 int nargs = oldnargs - skip + n;
9788 tree *buffer;
9790 if (n > 0)
9792 int i, j;
9794 buffer = XALLOCAVEC (tree, nargs);
9795 for (i = 0; i < n; i++)
9796 buffer[i] = va_arg (newargs, tree);
9797 for (j = skip; j < oldnargs; j++, i++)
9798 buffer[i] = args[j];
9800 else
9801 buffer = args + skip;
9803 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9806 /* Return true if FNDECL shouldn't be folded right now.
9807 If a built-in function has an inline attribute always_inline
9808 wrapper, defer folding it until after always_inline functions have
9809 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9810 might not be performed. */
9812 bool
9813 avoid_folding_inline_builtin (tree fndecl)
9815 return (DECL_DECLARED_INLINE_P (fndecl)
9816 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9817 && cfun
9818 && !cfun->always_inline_functions_inlined
9819 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9822 /* A wrapper function for builtin folding that prevents warnings for
9823 "statement without effect" and the like, caused by removing the
9824 call node earlier than the warning is generated. */
9826 tree
9827 fold_call_expr (location_t loc, tree exp, bool ignore)
9829 tree ret = NULL_TREE;
9830 tree fndecl = get_callee_fndecl (exp);
9831 if (fndecl && fndecl_built_in_p (fndecl)
9832 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9833 yet. Defer folding until we see all the arguments
9834 (after inlining). */
9835 && !CALL_EXPR_VA_ARG_PACK (exp))
9837 int nargs = call_expr_nargs (exp);
9839 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9840 instead last argument is __builtin_va_arg_pack (). Defer folding
9841 even in that case, until arguments are finalized. */
9842 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9844 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9845 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9846 return NULL_TREE;
9849 if (avoid_folding_inline_builtin (fndecl))
9850 return NULL_TREE;
9852 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9853 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9854 CALL_EXPR_ARGP (exp), ignore);
9855 else
9857 tree *args = CALL_EXPR_ARGP (exp);
9858 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9859 if (ret)
9860 return ret;
9863 return NULL_TREE;
9866 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9867 N arguments are passed in the array ARGARRAY. Return a folded
9868 expression or NULL_TREE if no simplification was possible. */
9870 tree
9871 fold_builtin_call_array (location_t loc, tree,
9872 tree fn,
9873 int n,
9874 tree *argarray)
9876 if (TREE_CODE (fn) != ADDR_EXPR)
9877 return NULL_TREE;
9879 tree fndecl = TREE_OPERAND (fn, 0);
9880 if (TREE_CODE (fndecl) == FUNCTION_DECL
9881 && fndecl_built_in_p (fndecl))
9883 /* If last argument is __builtin_va_arg_pack (), arguments to this
9884 function are not finalized yet. Defer folding until they are. */
9885 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9887 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9888 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9889 return NULL_TREE;
9891 if (avoid_folding_inline_builtin (fndecl))
9892 return NULL_TREE;
9893 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9894 return targetm.fold_builtin (fndecl, n, argarray, false);
9895 else
9896 return fold_builtin_n (loc, fndecl, argarray, n, false);
9899 return NULL_TREE;
9902 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9903 along with N new arguments specified as the "..." parameters. SKIP
9904 is the number of arguments in EXP to be omitted. This function is used
9905 to do varargs-to-varargs transformations. */
9907 static tree
9908 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9910 va_list ap;
9911 tree t;
9913 va_start (ap, n);
9914 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9915 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9916 va_end (ap);
9918 return t;
9921 /* Validate a single argument ARG against a tree code CODE representing
9922 a type. Return true when argument is valid. */
9924 static bool
9925 validate_arg (const_tree arg, enum tree_code code)
9927 if (!arg)
9928 return false;
9929 else if (code == POINTER_TYPE)
9930 return POINTER_TYPE_P (TREE_TYPE (arg));
9931 else if (code == INTEGER_TYPE)
9932 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9933 return code == TREE_CODE (TREE_TYPE (arg));
9936 /* This function validates the types of a function call argument list
9937 against a specified list of tree_codes. If the last specifier is a 0,
9938 that represents an ellipsis; otherwise the last specifier must be a
9939 VOID_TYPE.
9941 This is the GIMPLE version of validate_arglist. Eventually we want to
9942 completely convert builtins.c to work from GIMPLEs and the tree based
9943 validate_arglist will then be removed. */
9945 bool
9946 validate_gimple_arglist (const gcall *call, ...)
9948 enum tree_code code;
9949 bool res = false;
9950 va_list ap;
9951 const_tree arg;
9952 size_t i;
9954 va_start (ap, call);
9955 i = 0;
9957 do
9959 code = (enum tree_code) va_arg (ap, int);
9960 switch (code)
9962 case 0:
9963 /* This signifies an ellipsis; any further arguments are all OK. */
9964 res = true;
9965 goto end;
9966 case VOID_TYPE:
9967 /* This signifies an endlink; if no arguments remain, return
9968 true, otherwise return false. */
9969 res = (i == gimple_call_num_args (call));
9970 goto end;
9971 default:
9972 /* If no parameters remain or the parameter's code does not
9973 match the specified code, return false. Otherwise continue
9974 checking any remaining arguments. */
9975 arg = gimple_call_arg (call, i++);
9976 if (!validate_arg (arg, code))
9977 goto end;
9978 break;
9981 while (1);
9983 /* We need gotos here since we can only have one VA_CLOSE in a
9984 function. */
9985 end: ;
9986 va_end (ap);
9988 return res;
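/* Editorial usage sketch (added; not part of the original source): a
   caller checking a sincos-style signature would write

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                              POINTER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE requires exactly three arguments;
   ending the list with 0 instead would accept arbitrary further
   arguments.  */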
9991 /* Default target-specific builtin expander that does nothing. */
9993 rtx
9994 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9995 rtx target ATTRIBUTE_UNUSED,
9996 rtx subtarget ATTRIBUTE_UNUSED,
9997 machine_mode mode ATTRIBUTE_UNUSED,
9998 int ignore ATTRIBUTE_UNUSED)
10000 return NULL_RTX;
10003 /* Returns true if EXP represents data that would potentially reside
10004 in a readonly section. */
10006 bool
10007 readonly_data_expr (tree exp)
10009 STRIP_NOPS (exp);
10011 if (TREE_CODE (exp) != ADDR_EXPR)
10012 return false;
10014 exp = get_base_address (TREE_OPERAND (exp, 0));
10015 if (!exp)
10016 return false;
10018 /* Make sure we call decl_readonly_section only for trees it
10019 can handle (since it returns true for everything it doesn't
10020 understand). */
10021 if (TREE_CODE (exp) == STRING_CST
10022 || TREE_CODE (exp) == CONSTRUCTOR
10023 || (VAR_P (exp) && TREE_STATIC (exp)))
10024 return decl_readonly_section (exp, 0);
10025 else
10026 return false;
10029 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10030 to the call, and TYPE is its return type.
10032 Return NULL_TREE if no simplification was possible, otherwise return the
10033 simplified form of the call as a tree.
10035 The simplified form may be a constant or other expression which
10036 computes the same value, but in a more efficient manner (including
10037 calls to other builtin functions).
10039 The call may contain arguments which need to be evaluated, but
10040 which are not useful to determine the result of the call. In
10041 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10042 COMPOUND_EXPR will be an argument which must be evaluated.
10043 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10044 COMPOUND_EXPR in the chain will contain the tree for the simplified
10045 form of the builtin function call. */
10047 static tree
10048 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10050 if (!validate_arg (s1, POINTER_TYPE)
10051 || !validate_arg (s2, POINTER_TYPE))
10052 return NULL_TREE;
10053 else
10055 tree fn;
10056 const char *p1, *p2;
10058 p2 = c_getstr (s2);
10059 if (p2 == NULL)
10060 return NULL_TREE;
10062 p1 = c_getstr (s1);
10063 if (p1 != NULL)
10065 const char *r = strpbrk (p1, p2);
10066 tree tem;
10068 if (r == NULL)
10069 return build_int_cst (TREE_TYPE (s1), 0);
10071 /* Return an offset into the constant string argument. */
10072 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10073 return fold_convert_loc (loc, type, tem);
10076 if (p2[0] == '\0')
10077 /* strpbrk(x, "") == NULL.
10078 Evaluate and ignore s1 in case it had side-effects. */
10079 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10081 if (p2[1] != '\0')
10082 return NULL_TREE; /* Really call strpbrk. */
10084 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10085 if (!fn)
10086 return NULL_TREE;
10088 /* New argument list transforming strpbrk(s1, s2) to
10089 strchr(s1, s2[0]). */
10090 return build_call_expr_loc (loc, fn, 2, s1,
10091 build_int_cst (integer_type_node, p2[0]));
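/* Editorial illustration (added; not part of the original source):

     strpbrk (s, "")         -> NULL, with S still evaluated
     strpbrk (s, "l")        -> strchr (s, 'l')
     strpbrk ("hello", "lo") -> "hello" + 2

   the last case being full constant folding via the host strpbrk.  */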
10095 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10096 to the call.
10098 Return NULL_TREE if no simplification was possible, otherwise return the
10099 simplified form of the call as a tree.
10101 The simplified form may be a constant or other expression which
10102 computes the same value, but in a more efficient manner (including
10103 calls to other builtin functions).
10105 The call may contain arguments which need to be evaluated, but
10106 which are not useful to determine the result of the call. In
10107 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10108 COMPOUND_EXPR will be an argument which must be evaluated.
10109 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10110 COMPOUND_EXPR in the chain will contain the tree for the simplified
10111 form of the builtin function call. */
10113 static tree
10114 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10116 if (!validate_arg (s1, POINTER_TYPE)
10117 || !validate_arg (s2, POINTER_TYPE))
10118 return NULL_TREE;
10119 else
10121 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10123 /* If either argument is "", the result is zero. */
10124 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10125 /* Evaluate and ignore both arguments in case either one has
10126 side-effects. */
10127 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10128 s1, s2);
10129 return NULL_TREE;
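/* Editorial illustration (added; not part of the original source): only
   the empty-string cases are folded here,

     strspn (s, "") -> 0
     strspn ("", s) -> 0

   with both arguments still evaluated for side effects; anything else
   remains a real call.  */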
10133 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10134 to the call.
10136 Return NULL_TREE if no simplification was possible, otherwise return the
10137 simplified form of the call as a tree.
10139 The simplified form may be a constant or other expression which
10140 computes the same value, but in a more efficient manner (including
10141 calls to other builtin functions).
10143 The call may contain arguments which need to be evaluated, but
10144 which are not useful to determine the result of the call. In
10145 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10146 COMPOUND_EXPR will be an argument which must be evaluated.
10147 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10148 COMPOUND_EXPR in the chain will contain the tree for the simplified
10149 form of the builtin function call. */
10151 static tree
10152 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10154 if (!validate_arg (s1, POINTER_TYPE)
10155 || !validate_arg (s2, POINTER_TYPE))
10156 return NULL_TREE;
10157 else
10160 /* If the first argument is "", the result is zero. */
10160 const char *p1 = c_getstr (s1);
10161 if (p1 && *p1 == '\0')
10163 /* Evaluate and ignore argument s2 in case it has
10164 side-effects. */
10165 return omit_one_operand_loc (loc, size_type_node,
10166 size_zero_node, s2);
10169 /* If the second argument is "", return __builtin_strlen(s1). */
10170 const char *p2 = c_getstr (s2);
10171 if (p2 && *p2 == '\0')
10173 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10175 /* If the replacement _DECL isn't initialized, don't do the
10176 transformation. */
10177 if (!fn)
10178 return NULL_TREE;
10180 return build_call_expr_loc (loc, fn, 1, s1);
10182 return NULL_TREE;
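/* Editorial illustration (added; not part of the original source):

     strcspn ("", s2) -> 0, with S2 still evaluated
     strcspn (s1, "") -> strlen (s1)

   the second fold depending on the implicit strlen decl being
   available.  */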
10186 /* Fold the next_arg or va_start call EXP. Returns true if an error
10187 was produced, false otherwise. This is done so that we don't output
10188 the error or warning more than once. */
10190 bool
10191 fold_builtin_next_arg (tree exp, bool va_start_p)
10193 tree fntype = TREE_TYPE (current_function_decl);
10194 int nargs = call_expr_nargs (exp);
10195 tree arg;
10196 /* There is a good chance the current input_location points inside the
10197 definition of the va_start macro (perhaps on the token for
10198 builtin) in a system header, so warnings will not be emitted.
10199 Use the location in real source code. */
10200 source_location current_location =
10201 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10202 NULL);
10204 if (!stdarg_p (fntype))
10206 error ("%<va_start%> used in function with fixed args");
10207 return true;
10210 if (va_start_p)
10212 if (nargs != 2)
10214 error ("wrong number of arguments to function %<va_start%>");
10215 return true;
10217 arg = CALL_EXPR_ARG (exp, 1);
10219 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10220 when we checked the arguments and if needed issued a warning. */
10221 else
10223 if (nargs == 0)
10225 /* Evidently an out of date version of <stdarg.h>; can't validate
10226 va_start's second argument, but can still work as intended. */
10227 warning_at (current_location,
10228 OPT_Wvarargs,
10229 "%<__builtin_next_arg%> called without an argument");
10230 return true;
10232 else if (nargs > 1)
10234 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10235 return true;
10237 arg = CALL_EXPR_ARG (exp, 0);
10240 if (TREE_CODE (arg) == SSA_NAME)
10241 arg = SSA_NAME_VAR (arg);
10243 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10244 or __builtin_next_arg (0) the first time we see it, after checking
10245 the arguments and if needed issuing a warning. */
10246 if (!integer_zerop (arg))
10248 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10250 /* Strip off all nops for the sake of the comparison. This
10251 is not quite the same as STRIP_NOPS. It does more.
10252 We must also strip off INDIRECT_EXPR for C++ reference
10253 parameters. */
10254 while (CONVERT_EXPR_P (arg)
10255 || TREE_CODE (arg) == INDIRECT_REF)
10256 arg = TREE_OPERAND (arg, 0);
10257 if (arg != last_parm)
10259 /* FIXME: Sometimes with the tree optimizers we can end up with
10260 something other than the last argument even though the user
10261 used the last argument. We just warn and replace the arg
10262 below, so wrong code may still be generated because of
10263 it. */
10264 warning_at (current_location,
10265 OPT_Wvarargs,
10266 "second parameter of %<va_start%> not last named argument");
10269 /* Undefined by C99 7.15.1.4p4 (va_start):
10270 "If the parameter parmN is declared with the register storage
10271 class, with a function or array type, or with a type that is
10272 not compatible with the type that results after application of
10273 the default argument promotions, the behavior is undefined."
10275 else if (DECL_REGISTER (arg))
10277 warning_at (current_location,
10278 OPT_Wvarargs,
10279 "undefined behavior when second parameter of "
10280 "%<va_start%> is declared with %<register%> storage");
10283 /* We want to verify the second parameter just once before the tree
10284 optimizers are run and then avoid keeping it in the tree,
10285 as otherwise we could warn even for correct code like:
10286 void foo (int i, ...)
10287 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10288 if (va_start_p)
10289 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10290 else
10291 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10293 return false;
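/* Editorial illustration (added; not part of the original source): for

     void foo (int i, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   the second argument is the last named parameter and no diagnostic is
   issued; naming any other parameter draws the -Wvarargs warning
   above.  Either way the argument is then replaced by 0 so that later
   passes cannot warn again.  */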
10297 /* Expand a call EXP to __builtin_object_size. */
10299 static rtx
10300 expand_builtin_object_size (tree exp)
10302 tree ost;
10303 int object_size_type;
10304 tree fndecl = get_callee_fndecl (exp);
10306 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10308 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10309 exp, fndecl);
10310 expand_builtin_trap ();
10311 return const0_rtx;
10314 ost = CALL_EXPR_ARG (exp, 1);
10315 STRIP_NOPS (ost);
10317 if (TREE_CODE (ost) != INTEGER_CST
10318 || tree_int_cst_sgn (ost) < 0
10319 || compare_tree_int (ost, 3) > 0)
10321 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10322 exp, fndecl);
10323 expand_builtin_trap ();
10324 return const0_rtx;
10327 object_size_type = tree_to_shwi (ost);
10329 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10332 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10333 FCODE is the BUILT_IN_* to use.
10334 Return NULL_RTX if we failed; the caller should emit a normal call,
10335 otherwise try to get the result in TARGET, if convenient (and in
10336 mode MODE if that's convenient). */
10338 static rtx
10339 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10340 enum built_in_function fcode)
10342 if (!validate_arglist (exp,
10343 POINTER_TYPE,
10344 fcode == BUILT_IN_MEMSET_CHK
10345 ? INTEGER_TYPE : POINTER_TYPE,
10346 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10347 return NULL_RTX;
10349 tree dest = CALL_EXPR_ARG (exp, 0);
10350 tree src = CALL_EXPR_ARG (exp, 1);
10351 tree len = CALL_EXPR_ARG (exp, 2);
10352 tree size = CALL_EXPR_ARG (exp, 3);
10354 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10355 /*str=*/NULL_TREE, size);
10357 if (!tree_fits_uhwi_p (size))
10358 return NULL_RTX;
10360 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10362 /* Avoid transforming the checking call to an ordinary one when
10363 an overflow has been detected or when the call couldn't be
10364 validated because the size is not constant. */
10365 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10366 return NULL_RTX;
10368 tree fn = NULL_TREE;
10369 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10370 mem{cpy,pcpy,move,set} is available. */
10371 switch (fcode)
10373 case BUILT_IN_MEMCPY_CHK:
10374 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10375 break;
10376 case BUILT_IN_MEMPCPY_CHK:
10377 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10378 break;
10379 case BUILT_IN_MEMMOVE_CHK:
10380 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10381 break;
10382 case BUILT_IN_MEMSET_CHK:
10383 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10384 break;
10385 default:
10386 break;
10389 if (! fn)
10390 return NULL_RTX;
10392 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10393 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10394 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10395 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10397 else if (fcode == BUILT_IN_MEMSET_CHK)
10398 return NULL_RTX;
10399 else
10401 unsigned int dest_align = get_pointer_alignment (dest);
10403 /* If DEST is not a pointer type, call the normal function. */
10404 if (dest_align == 0)
10405 return NULL_RTX;
10407 /* If SRC and DEST are the same (and not volatile), do nothing. */
10408 if (operand_equal_p (src, dest, 0))
10410 tree expr;
10412 if (fcode != BUILT_IN_MEMPCPY_CHK)
10414 /* Evaluate and ignore LEN in case it has side-effects. */
10415 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10416 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10419 expr = fold_build_pointer_plus (dest, len);
10420 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10423 /* __memmove_chk special case. */
10424 if (fcode == BUILT_IN_MEMMOVE_CHK)
10426 unsigned int src_align = get_pointer_alignment (src);
10428 if (src_align == 0)
10429 return NULL_RTX;
10431 /* If src is categorized for a readonly section we can use
10432 normal __memcpy_chk. */
10433 if (readonly_data_expr (src))
10435 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10436 if (!fn)
10437 return NULL_RTX;
10438 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10439 dest, src, len, size);
10440 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10441 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10442 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10445 return NULL_RTX;
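/* Editorial illustration (added; not part of the original source): with
   a known length that provably fits the destination,

     __memcpy_chk (d, s, 32, 64) -> memcpy (d, s, 32)

   a __memmove_chk whose source lives in a read-only section degrades
   to __memcpy_chk, and anything unprovable is left for the library
   checking call.  */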
10449 /* Emit a warning if a buffer overflow is detected at compile time. */
10451 static void
10452 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10454 /* The source string. */
10455 tree srcstr = NULL_TREE;
10456 /* The size of the destination object. */
10457 tree objsize = NULL_TREE;
10458 /* The string that is being concatenated with (as in __strcat_chk)
10459 or null if it isn't. */
10460 tree catstr = NULL_TREE;
10461 /* The maximum length of the source sequence in a bounded operation
10462 (such as __strncat_chk) or null if the operation isn't bounded
10463 (such as __strcat_chk). */
10464 tree maxread = NULL_TREE;
10465 /* The exact size of the access (such as in __strncpy_chk). */
10466 tree size = NULL_TREE;
10468 switch (fcode)
10470 case BUILT_IN_STRCPY_CHK:
10471 case BUILT_IN_STPCPY_CHK:
10472 srcstr = CALL_EXPR_ARG (exp, 1);
10473 objsize = CALL_EXPR_ARG (exp, 2);
10474 break;
10476 case BUILT_IN_STRCAT_CHK:
10477 /* For __strcat_chk the warning will be emitted only if overflowing
10478 by at least strlen (dest) + 1 bytes. */
10479 catstr = CALL_EXPR_ARG (exp, 0);
10480 srcstr = CALL_EXPR_ARG (exp, 1);
10481 objsize = CALL_EXPR_ARG (exp, 2);
10482 break;
10484 case BUILT_IN_STRNCAT_CHK:
10485 catstr = CALL_EXPR_ARG (exp, 0);
10486 srcstr = CALL_EXPR_ARG (exp, 1);
10487 maxread = CALL_EXPR_ARG (exp, 2);
10488 objsize = CALL_EXPR_ARG (exp, 3);
10489 break;
10491 case BUILT_IN_STRNCPY_CHK:
10492 case BUILT_IN_STPNCPY_CHK:
10493 srcstr = CALL_EXPR_ARG (exp, 1);
10494 size = CALL_EXPR_ARG (exp, 2);
10495 objsize = CALL_EXPR_ARG (exp, 3);
10496 break;
10498 case BUILT_IN_SNPRINTF_CHK:
10499 case BUILT_IN_VSNPRINTF_CHK:
10500 maxread = CALL_EXPR_ARG (exp, 1);
10501 objsize = CALL_EXPR_ARG (exp, 3);
10502 break;
10503 default:
10504 gcc_unreachable ();
10507 if (catstr && maxread)
10509 /* Check __strncat_chk. There is no way to determine the length
10510 of the string to which the source string is being appended so
10511 just warn when the length of the source string is not known. */
10512 check_strncat_sizes (exp, objsize);
10513 return;
10516 /* The destination argument is the first one for all built-ins above. */
10517 tree dst = CALL_EXPR_ARG (exp, 0);
10519 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10522 /* Emit a warning if a buffer overflow is detected at compile time
10523 in __sprintf_chk/__vsprintf_chk calls. */
10525 static void
10526 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10528 tree size, len, fmt;
10529 const char *fmt_str;
10530 int nargs = call_expr_nargs (exp);
10532 /* Verify the required arguments in the original call. */
10534 if (nargs < 4)
10535 return;
10536 size = CALL_EXPR_ARG (exp, 2);
10537 fmt = CALL_EXPR_ARG (exp, 3);
10539 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10540 return;
10542 /* Check whether the format is a literal string constant. */
10543 fmt_str = c_getstr (fmt);
10544 if (fmt_str == NULL)
10545 return;
10547 if (!init_target_chars ())
10548 return;
10550 /* If the format doesn't contain % args or %%, we know its size. */
10551 if (strchr (fmt_str, target_percent) == 0)
10552 len = build_int_cstu (size_type_node, strlen (fmt_str));
10553 /* If the format is "%s" and first ... argument is a string literal,
10554 we know it too. */
10555 else if (fcode == BUILT_IN_SPRINTF_CHK
10556 && strcmp (fmt_str, target_percent_s) == 0)
10558 tree arg;
10560 if (nargs < 5)
10561 return;
10562 arg = CALL_EXPR_ARG (exp, 4);
10563 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10564 return;
10566 len = c_strlen (arg, 1);
10567 if (!len || ! tree_fits_uhwi_p (len))
10568 return;
10570 else
10571 return;
10573 /* Add one for the terminating nul. */
10574 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10576 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10577 /*maxread=*/NULL_TREE, len, size);
10580 /* Emit a warning if free is called with the address of a variable. */
10582 static void
10583 maybe_emit_free_warning (tree exp)
10585 tree arg = CALL_EXPR_ARG (exp, 0);
10587 STRIP_NOPS (arg);
10588 if (TREE_CODE (arg) != ADDR_EXPR)
10589 return;
10591 arg = get_base_address (TREE_OPERAND (arg, 0));
10592 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10593 return;
10595 if (SSA_VAR_P (arg))
10596 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10597 "%Kattempt to free a non-heap object %qD", exp, arg);
10598 else
10599 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10600 "%Kattempt to free a non-heap object", exp);
10603 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10604 if possible. */
10606 static tree
10607 fold_builtin_object_size (tree ptr, tree ost)
10609 unsigned HOST_WIDE_INT bytes;
10610 int object_size_type;
10612 if (!validate_arg (ptr, POINTER_TYPE)
10613 || !validate_arg (ost, INTEGER_TYPE))
10614 return NULL_TREE;
10616 STRIP_NOPS (ost);
10618 if (TREE_CODE (ost) != INTEGER_CST
10619 || tree_int_cst_sgn (ost) < 0
10620 || compare_tree_int (ost, 3) > 0)
10621 return NULL_TREE;
10623 object_size_type = tree_to_shwi (ost);
10625 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10626 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10627 and (size_t) 0 for types 2 and 3. */
10628 if (TREE_SIDE_EFFECTS (ptr))
10629 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10631 if (TREE_CODE (ptr) == ADDR_EXPR)
10633 compute_builtin_object_size (ptr, object_size_type, &bytes);
10634 if (wi::fits_to_tree_p (bytes, size_type_node))
10635 return build_int_cstu (size_type_node, bytes);
10637 else if (TREE_CODE (ptr) == SSA_NAME)
10639 /* If object size is not known yet, delay folding until
10640 later. Maybe subsequent passes will help determine
10641 it. */
10642 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10643 && wi::fits_to_tree_p (bytes, size_type_node))
10644 return build_int_cstu (size_type_node, bytes);
10647 return NULL_TREE;
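/* Editorial illustration (added; not part of the original source), for
   a hypothetical char buf[64] and a function f returning a pointer:

     __builtin_object_size (&buf[8], 0) -> 56
     __builtin_object_size (f (), 0)    -> (size_t) -1   (side effects)
     __builtin_object_size (f (), 2)    -> (size_t) 0

   ADDR_EXPR arguments fold immediately; SSA names are retried by later
   passes once the pointed-to object becomes known.  */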
10650 /* Builtins with folding operations that operate on "..." arguments
10651 need special handling; we need to store the arguments in a convenient
10652 data structure before attempting any folding. Fortunately there are
10653 only a few builtins that fall into this category. FNDECL is the
10654 function; ARGS is the array of NARGS arguments to the call. */
10656 static tree
10657 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10659 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10660 tree ret = NULL_TREE;
10662 switch (fcode)
10664 case BUILT_IN_FPCLASSIFY:
10665 ret = fold_builtin_fpclassify (loc, args, nargs);
10666 break;
10668 default:
10669 break;
10671 if (ret)
10673 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10674 SET_EXPR_LOCATION (ret, loc);
10675 TREE_NO_WARNING (ret) = 1;
10676 return ret;
10678 return NULL_TREE;
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
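/* Editor's note: the strings built above back the printf/fprintf
   simplifications elsewhere in GCC.  For example, once
   target_percent_s_newline is known, a call such as

     printf ("%s\n", str);

   can be rewritten as puts (str), with the format-string comparison
   performed in the target character set rather than the host's.  */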
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
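/* Editor's note: callers are expected to follow the pattern the
   comment above describes; clear the MPFR flags, compute, then
   validate.  A minimal sketch, mirroring do_mpfr_remquo () below:

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     int inexact = mpfr_sin (m, m, rnd);
     tree t = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);

   where T ends up NULL_TREE unless the result converts exactly into
   TYPE's mode.  */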
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long, so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, reduce the quo value modulo the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
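/* Illustrative example (editor's sketch, not part of the GCC sources):
   with constant arguments,

     int q;
     double r = remquo (10.0, 3.0, &q);

   folds to R = 1.0 with Q set to 3, since the quotient 10.0 / 3.0
   rounds to the integer 3 and the remainder is 10.0 - 3 * 3.0.  */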
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
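/* Illustrative example (editor's sketch, not part of the GCC sources):

     int sg;
     double lg = lgamma_r (0.5, &sg);

   folds to LG = log (sqrt (pi)), roughly 0.5724, with SG set to 1,
   because tgamma (0.5) = sqrt (pi) is positive.  */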
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
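/* Editor's note (hedged): FUNC is an MPC entry point with the
   standard two-operand signature, e.g. mpc_pow; GCC's constant
   folder uses this helper roughly as

     result = do_mpc_arg2 (arg0, arg1, type, flag_finite_math_only,
			   mpc_pow);

   to fold cpow () on constant complex arguments.  */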
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
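/* Illustrative example (editor's sketch, not part of the GCC sources):
   given a GIMPLE call statement for

     n = __builtin_strlen ("hi");

   this wrapper folds the call to the constant 2 and makes sure the
   replacement tree carries the original statement's location, so any
   later diagnostics still point at the user's call.  */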
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
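/* Illustrative example (editor's sketch, not part of the GCC sources):
   this hook runs for declarations that rename a builtin with an asm
   label, e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   where "my_ffs" is an arbitrary user-chosen name.  Afterwards calls
   to the builtin, and on targets where INT_TYPE_SIZE is narrower than
   a word the ffs libcall as well, are emitted against that name.  */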
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
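/* Editor's note: the distinction matters for callers that estimate
   code size.  For instance, __builtin_popcount (x) is listed above
   because it typically expands to a single instruction, or at worst a
   small libcall, whereas an arbitrary call must be assumed to be
   expensive.  */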
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
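/* Editor's note (hedged): this helper backs string-builtin folders
   that must compare a constant character argument against the bytes
   of a string in the target character set; e.g. folding

     strchr ("abc", 'b');

   first runs the tree for 'b' through target_char_cst_p to obtain a
   host char it can search for.  */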
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
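/* Editor's note: with ptrdiff_type_node as the basis, the limit is
   PTRDIFF_MAX, e.g. 2^63 - 1 bytes on an LP64 target and 2^31 - 1 on
   a typical 32-bit one.  */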