/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

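/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") are true while a plain "memcpy"
   is not, so a call spelled __builtin_memcpy (d, s, n) is considered
   for expansion here even at -O0.  */
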
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

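/* Worked example: for the expression &buf + 4, with buf known to be
   16-byte aligned, get_pointer_alignment_1 sets *ALIGNP to 128 bits and
   *BITPOSP to 32 bits (the pointer equals 16*k + 4 for some k).
   get_pointer_alignment then collapses that to least_bit_hwi (32) = 32
   bits, i.e. the pointer itself is only guaranteed 4-byte aligned.  */
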
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

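/* For instance, string_length ("ab\0cd", 1, 5) returns 2 (the bytes
   before the embedded NUL), while string_length ("abcde", 1, 5)
   returns 5 because no NUL occurs within the first MAXELTS elements.  */
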
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}

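/* Sketch of the intent: given
     char a[4] = "abcd";   (no room for a terminating NUL)
   a query on an access to `a' returns its declaration and, when SIZE
   is non-null, sets *SIZE to 4 with *EXACT set, since the length of
   the unterminated array is known precisely here.  */
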
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   from what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}

      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}

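/* Examples: for an ARG that accesses the literal "foo", the result is
   ssize_int (3).  For a literal initializing a larger array, e.g.
     char buf[8] = "foo";
   a known offset past the literal but still within the array, such as
   buf + 5, yields ssize_int (0): the elements beyond the terminating
   NUL are themselves all NUL (the ELTOFF > STRELTS case above).  */
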
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

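/* For example, on a little-endian target c_readstr ("abcd", SImode)
   yields the constant 0x64636261 ('a' in the low byte), whereas a
   big-endian target gets 0x61626364.  Bytes past a NUL read as zero
   unless NULL_TERMINATED_P is false.  */
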
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

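/* At the source level this expands calls such as
     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);
   where the argument is a constant count of frames to walk back.  */
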
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if more arguments remain in the iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

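/* Typical use, as in the expanders below:
     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;
   requires exactly two (nonnull-checked) pointer arguments; ending the
   list with 0 instead of VOID_TYPE would additionally accept trailing
   arguments of any type.  */
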
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

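/* Source-level examples of the calls handled above:
     __builtin_prefetch (&a[i + 8]);          (read, locality 3)
     __builtin_prefetch (&a[i + 8], 1, 0);    (write, no temporal locality)
   On a target without a prefetch pattern, only the side effects of the
   address computation are preserved.  */
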
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

1596 /* Save the state required to perform an untyped call with the same
1597 arguments as were passed to the current function. */
1599 static rtx
1600 expand_builtin_apply_args_1 (void)
1602 rtx registers, tem;
1603 int size, align, regno;
1604 fixed_size_mode mode;
1605 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1607 /* Create a block where the arg-pointer, structure value address,
1608 and argument registers can be saved. */
1609 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1611 /* Walk past the arg-pointer and structure value address. */
1612 size = GET_MODE_SIZE (Pmode);
1613 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1614 size += GET_MODE_SIZE (Pmode);
1616 /* Save each register used in calling a function to the block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_args_mode[regno]) != VOIDmode)
1620 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1621 if (size % align != 0)
1622 size = CEIL (size, align) * align;
1624 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1626 emit_move_insn (adjust_address (registers, mode, size), tem);
1627 size += GET_MODE_SIZE (mode);
1630 /* Save the arg pointer to the block. */
1631 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1632 /* We need the pointer as the caller actually passed them to us, not
1633 as we might have pretended they were passed. Make sure it's a valid
1634 operand, as emit_move_insn isn't expected to handle a PLUS. */
1635 if (STACK_GROWS_DOWNWARD)
1637 = force_operand (plus_constant (Pmode, tem,
1638 crtl->args.pretend_args_size),
1639 NULL_RTX);
1640 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1642 size = GET_MODE_SIZE (Pmode);
1644 /* Save the structure value address unless this is passed as an
1645 "invisible" first argument. */
1646 if (struct_incoming_value)
1647 emit_move_insn (adjust_address (registers, Pmode, size),
1648 copy_to_reg (struct_incoming_value));
1650 /* Return the address of the block. */
1651 return copy_addr_to_reg (XEXP (registers, 0));
1654 /* __builtin_apply_args returns block of memory allocated on
1655 the stack into which is stored the arg pointer, structure
1656 value address, static chain, and all the registers that might
1657 possibly be used in performing a function call. The code is
1658 moved to the start of the function so the incoming values are
1659 saved. */
1661 static rtx
1662 expand_builtin_apply_args (void)
1664 /* Don't do __builtin_apply_args more than once in a function.
1665 Save the result of the first call and reuse it. */
1666 if (apply_args_value != 0)
1667 return apply_args_value;
1669 /* When this function is called, it means that registers must be
1670 saved on entry to this function. So we migrate the
1671 call to the first insn of this function. */
1672 rtx temp;
1674 start_sequence ();
1675 temp = expand_builtin_apply_args_1 ();
1676 rtx_insn *seq = get_insns ();
1677 end_sequence ();
1679 apply_args_value = temp;
1681 /* Put the insns after the NOTE that starts the function.
1682 If this is inside a start_sequence, make the outer-level insn
1683 chain current, so the code is placed at the start of the
1684 function. If internal_arg_pointer is a non-virtual pseudo,
1685 it needs to be placed after the function that initializes
1686 that pseudo. */
1687 push_topmost_sequence ();
1688 if (REG_P (crtl->args.internal_arg_pointer)
1689 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1690 emit_insn_before (seq, parm_birth_insn);
1691 else
1692 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1693 pop_topmost_sequence ();
1694 return temp;
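/* Illustrative sketch (hedged, not from this file): at the source level the
   three untyped-call builtins expanded here cooperate roughly like

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where target_fn and the 64-byte argument-block bound are hypothetical
   placeholders.  expand_builtin_apply_args emits the register save at
   function entry, expand_builtin_apply (below) replays the saved block
   around the call, and expand_builtin_return performs the untyped
   return.  */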
1698 /* Perform an untyped call and save the state required to perform an
1699 untyped return of whatever value was returned by the given function. */
1701 static rtx
1702 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1704 int size, align, regno;
1705 fixed_size_mode mode;
1706 rtx incoming_args, result, reg, dest, src;
1707 rtx_call_insn *call_insn;
1708 rtx old_stack_level = 0;
1709 rtx call_fusage = 0;
1710 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1712 arguments = convert_memory_address (Pmode, arguments);
1714 /* Create a block where the return registers can be saved. */
1715 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1717 /* Fetch the arg pointer from the ARGUMENTS block. */
1718 incoming_args = gen_reg_rtx (Pmode);
1719 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1720 if (!STACK_GROWS_DOWNWARD)
1721 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1722 incoming_args, 0, OPTAB_LIB_WIDEN);
1724 /* Push a new argument block and copy the arguments. Do not allow
1725 the (potential) memcpy call below to interfere with our stack
1726 manipulations. */
1727 do_pending_stack_adjust ();
1728 NO_DEFER_POP;
1730 /* Save the stack with nonlocal if available. */
1731 if (targetm.have_save_stack_nonlocal ())
1732 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1733 else
1734 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1736 /* Allocate a block of memory onto the stack and copy the memory
1737 arguments to the outgoing arguments address. We can pass TRUE
1738 as the 4th argument because we just saved the stack pointer
1739 and will restore it right after the call. */
1740 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1742 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1743 may have already set current_function_calls_alloca to true.
1744 current_function_calls_alloca won't be set if argsize is zero,
1745 so we have to guarantee need_drap is true here. */
1746 if (SUPPORTS_STACK_ALIGNMENT)
1747 crtl->need_drap = true;
1749 dest = virtual_outgoing_args_rtx;
1750 if (!STACK_GROWS_DOWNWARD)
1752 if (CONST_INT_P (argsize))
1753 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1754 else
1755 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1757 dest = gen_rtx_MEM (BLKmode, dest);
1758 set_mem_align (dest, PARM_BOUNDARY);
1759 src = gen_rtx_MEM (BLKmode, incoming_args);
1760 set_mem_align (src, PARM_BOUNDARY);
1761 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1763 /* Refer to the argument block. */
1764 apply_args_size ();
1765 arguments = gen_rtx_MEM (BLKmode, arguments);
1766 set_mem_align (arguments, PARM_BOUNDARY);
1768 /* Walk past the arg-pointer and structure value address. */
1769 size = GET_MODE_SIZE (Pmode);
1770 if (struct_value)
1771 size += GET_MODE_SIZE (Pmode);
1773 /* Restore each of the registers previously saved. Make USE insns
1774 for each of these registers for use in making the call. */
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_args_mode[regno]) != VOIDmode)
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, regno);
1782 emit_move_insn (reg, adjust_address (arguments, mode, size));
1783 use_reg (&call_fusage, reg);
1784 size += GET_MODE_SIZE (mode);
1787 /* Restore the structure value address unless this is passed as an
1788 "invisible" first argument. */
1789 size = GET_MODE_SIZE (Pmode);
1790 if (struct_value)
1792 rtx value = gen_reg_rtx (Pmode);
1793 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1794 emit_move_insn (struct_value, value);
1795 if (REG_P (struct_value))
1796 use_reg (&call_fusage, struct_value);
1799 /* All arguments and registers used for the call are set up by now! */
1800 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1802 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1803 and we don't want to load it into a register as an optimization,
1804 because prepare_call_address already did it if it should be done. */
1805 if (GET_CODE (function) != SYMBOL_REF)
1806 function = memory_address (FUNCTION_MODE, function);
1808 /* Generate the actual call instruction and save the return value. */
1809 if (targetm.have_untyped_call ())
1811 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1812 emit_call_insn (targetm.gen_untyped_call (mem, result,
1813 result_vector (1, result)));
1815 else if (targetm.have_call_value ())
1817 rtx valreg = 0;
1819 /* Locate the unique return register. It is not possible to
1820 express a call that sets more than one return register using
1821 call_value; use untyped_call for that. In fact, untyped_call
1822 only needs to save the return registers in the given block. */
1823 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1824 if ((mode = apply_result_mode[regno]) != VOIDmode)
1826 gcc_assert (!valreg); /* have_untyped_call required. */
1828 valreg = gen_rtx_REG (mode, regno);
1831 emit_insn (targetm.gen_call_value (valreg,
1832 gen_rtx_MEM (FUNCTION_MODE, function),
1833 const0_rtx, NULL_RTX, const0_rtx));
1835 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1837 else
1838 gcc_unreachable ();
1840 /* Find the CALL insn we just emitted, and attach the register usage
1841 information. */
1842 call_insn = last_call_insn ();
1843 add_function_usage_to (call_insn, call_fusage);
1845 /* Restore the stack. */
1846 if (targetm.have_save_stack_nonlocal ())
1847 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1848 else
1849 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1850 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1852 OK_DEFER_POP;
1854 /* Return the address of the result block. */
1855 result = copy_addr_to_reg (XEXP (result, 0));
1856 return convert_memory_address (ptr_mode, result);
1859 /* Perform an untyped return. */
1861 static void
1862 expand_builtin_return (rtx result)
1864 int size, align, regno;
1865 fixed_size_mode mode;
1866 rtx reg;
1867 rtx_insn *call_fusage = 0;
1869 result = convert_memory_address (Pmode, result);
1871 apply_result_size ();
1872 result = gen_rtx_MEM (BLKmode, result);
1874 if (targetm.have_untyped_return ())
1876 rtx vector = result_vector (0, result);
1877 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1878 emit_barrier ();
1879 return;
1882 /* Restore the return value and note that each value is used. */
1883 size = 0;
1884 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1885 if ((mode = apply_result_mode[regno]) != VOIDmode)
1887 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1888 if (size % align != 0)
1889 size = CEIL (size, align) * align;
1890 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1891 emit_move_insn (reg, adjust_address (result, mode, size));
1893 push_to_sequence (call_fusage);
1894 emit_use (reg);
1895 call_fusage = get_insns ();
1896 end_sequence ();
1897 size += GET_MODE_SIZE (mode);
1900 /* Put the USE insns before the return. */
1901 emit_insn (call_fusage);
1903 /* Return whatever value was restored by jumping directly to the end
1904 of the function. */
1905 expand_naked_return ();
1908 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1910 static enum type_class
1911 type_to_class (tree type)
1913 switch (TREE_CODE (type))
1915 case VOID_TYPE: return void_type_class;
1916 case INTEGER_TYPE: return integer_type_class;
1917 case ENUMERAL_TYPE: return enumeral_type_class;
1918 case BOOLEAN_TYPE: return boolean_type_class;
1919 case POINTER_TYPE: return pointer_type_class;
1920 case REFERENCE_TYPE: return reference_type_class;
1921 case OFFSET_TYPE: return offset_type_class;
1922 case REAL_TYPE: return real_type_class;
1923 case COMPLEX_TYPE: return complex_type_class;
1924 case FUNCTION_TYPE: return function_type_class;
1925 case METHOD_TYPE: return method_type_class;
1926 case RECORD_TYPE: return record_type_class;
1927 case UNION_TYPE:
1928 case QUAL_UNION_TYPE: return union_type_class;
1929 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1930 ? string_type_class : array_type_class);
1931 case LANG_TYPE: return lang_type_class;
1932 default: return no_type_class;
1936 /* Expand a call EXP to __builtin_classify_type. */
1938 static rtx
1939 expand_builtin_classify_type (tree exp)
1941 if (call_expr_nargs (exp))
1942 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1943 return GEN_INT (no_type_class);
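/* Hedged illustration: with the mapping above, __builtin_classify_type (1)
   yields integer_type_class and __builtin_classify_type (1.0) yields
   real_type_class; note that in C an array argument normally decays in the
   call, so it tends to be classified as pointer_type_class rather than
   array_type_class.  */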
1946 /* This helper macro, meant to be used in mathfn_built_in below, determines
1947 which among a set of builtin math functions is appropriate for a given type
1948 mode.  The `F' (float) and `L' (long double) variants are automatically
1949 generated from the 'double' case.  If a function supports the _Float<N> and
1950 _Float<N>X types, additional variants with 'F32', 'F64', 'F128', etc.
1951 suffixes are considered as well.  */
1952 #define CASE_MATHFN(MATHFN) \
1953 CASE_CFN_##MATHFN: \
1954 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1955 fcodel = BUILT_IN_##MATHFN##L ; break;
1956 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1957 types. */
1958 #define CASE_MATHFN_FLOATN(MATHFN) \
1959 CASE_CFN_##MATHFN: \
1960 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1961 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1962 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1963 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1964 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1965 break;
1966 /* Similar to above, but appends _R after any F/L suffix. */
1967 #define CASE_MATHFN_REENT(MATHFN) \
1968 case CFN_BUILT_IN_##MATHFN##_R: \
1969 case CFN_BUILT_IN_##MATHFN##F_R: \
1970 case CFN_BUILT_IN_##MATHFN##L_R: \
1971 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1972 fcodel = BUILT_IN_##MATHFN##L_R ; break;
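/* For concreteness, CASE_MATHFN (ACOS) above expands to roughly

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   so each case records the double/float/long double variants that the
   type dispatch at the end of mathfn_built_in_2 chooses between.  */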
1974 /* Return a function equivalent to FN but operating on floating-point
1975 values of type TYPE, or END_BUILTINS if no such function exists.
1976 This is purely an operation on function codes; it does not guarantee
1977 that the target actually has an implementation of the function. */
1979 static built_in_function
1980 mathfn_built_in_2 (tree type, combined_fn fn)
1982 tree mtype;
1983 built_in_function fcode, fcodef, fcodel;
1984 built_in_function fcodef16 = END_BUILTINS;
1985 built_in_function fcodef32 = END_BUILTINS;
1986 built_in_function fcodef64 = END_BUILTINS;
1987 built_in_function fcodef128 = END_BUILTINS;
1988 built_in_function fcodef32x = END_BUILTINS;
1989 built_in_function fcodef64x = END_BUILTINS;
1990 built_in_function fcodef128x = END_BUILTINS;
1992 switch (fn)
1994 CASE_MATHFN (ACOS)
1995 CASE_MATHFN (ACOSH)
1996 CASE_MATHFN (ASIN)
1997 CASE_MATHFN (ASINH)
1998 CASE_MATHFN (ATAN)
1999 CASE_MATHFN (ATAN2)
2000 CASE_MATHFN (ATANH)
2001 CASE_MATHFN (CBRT)
2002 CASE_MATHFN_FLOATN (CEIL)
2003 CASE_MATHFN (CEXPI)
2004 CASE_MATHFN_FLOATN (COPYSIGN)
2005 CASE_MATHFN (COS)
2006 CASE_MATHFN (COSH)
2007 CASE_MATHFN (DREM)
2008 CASE_MATHFN (ERF)
2009 CASE_MATHFN (ERFC)
2010 CASE_MATHFN (EXP)
2011 CASE_MATHFN (EXP10)
2012 CASE_MATHFN (EXP2)
2013 CASE_MATHFN (EXPM1)
2014 CASE_MATHFN (FABS)
2015 CASE_MATHFN (FDIM)
2016 CASE_MATHFN_FLOATN (FLOOR)
2017 CASE_MATHFN_FLOATN (FMA)
2018 CASE_MATHFN_FLOATN (FMAX)
2019 CASE_MATHFN_FLOATN (FMIN)
2020 CASE_MATHFN (FMOD)
2021 CASE_MATHFN (FREXP)
2022 CASE_MATHFN (GAMMA)
2023 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2024 CASE_MATHFN (HUGE_VAL)
2025 CASE_MATHFN (HYPOT)
2026 CASE_MATHFN (ILOGB)
2027 CASE_MATHFN (ICEIL)
2028 CASE_MATHFN (IFLOOR)
2029 CASE_MATHFN (INF)
2030 CASE_MATHFN (IRINT)
2031 CASE_MATHFN (IROUND)
2032 CASE_MATHFN (ISINF)
2033 CASE_MATHFN (J0)
2034 CASE_MATHFN (J1)
2035 CASE_MATHFN (JN)
2036 CASE_MATHFN (LCEIL)
2037 CASE_MATHFN (LDEXP)
2038 CASE_MATHFN (LFLOOR)
2039 CASE_MATHFN (LGAMMA)
2040 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2041 CASE_MATHFN (LLCEIL)
2042 CASE_MATHFN (LLFLOOR)
2043 CASE_MATHFN (LLRINT)
2044 CASE_MATHFN (LLROUND)
2045 CASE_MATHFN (LOG)
2046 CASE_MATHFN (LOG10)
2047 CASE_MATHFN (LOG1P)
2048 CASE_MATHFN (LOG2)
2049 CASE_MATHFN (LOGB)
2050 CASE_MATHFN (LRINT)
2051 CASE_MATHFN (LROUND)
2052 CASE_MATHFN (MODF)
2053 CASE_MATHFN (NAN)
2054 CASE_MATHFN (NANS)
2055 CASE_MATHFN_FLOATN (NEARBYINT)
2056 CASE_MATHFN (NEXTAFTER)
2057 CASE_MATHFN (NEXTTOWARD)
2058 CASE_MATHFN (POW)
2059 CASE_MATHFN (POWI)
2060 CASE_MATHFN (POW10)
2061 CASE_MATHFN (REMAINDER)
2062 CASE_MATHFN (REMQUO)
2063 CASE_MATHFN_FLOATN (RINT)
2064 CASE_MATHFN_FLOATN (ROUND)
2065 CASE_MATHFN_FLOATN (ROUNDEVEN)
2066 CASE_MATHFN (SCALB)
2067 CASE_MATHFN (SCALBLN)
2068 CASE_MATHFN (SCALBN)
2069 CASE_MATHFN (SIGNBIT)
2070 CASE_MATHFN (SIGNIFICAND)
2071 CASE_MATHFN (SIN)
2072 CASE_MATHFN (SINCOS)
2073 CASE_MATHFN (SINH)
2074 CASE_MATHFN_FLOATN (SQRT)
2075 CASE_MATHFN (TAN)
2076 CASE_MATHFN (TANH)
2077 CASE_MATHFN (TGAMMA)
2078 CASE_MATHFN_FLOATN (TRUNC)
2079 CASE_MATHFN (Y0)
2080 CASE_MATHFN (Y1)
2081 CASE_MATHFN (YN)
2083 default:
2084 return END_BUILTINS;
2087 mtype = TYPE_MAIN_VARIANT (type);
2088 if (mtype == double_type_node)
2089 return fcode;
2090 else if (mtype == float_type_node)
2091 return fcodef;
2092 else if (mtype == long_double_type_node)
2093 return fcodel;
2094 else if (mtype == float16_type_node)
2095 return fcodef16;
2096 else if (mtype == float32_type_node)
2097 return fcodef32;
2098 else if (mtype == float64_type_node)
2099 return fcodef64;
2100 else if (mtype == float128_type_node)
2101 return fcodef128;
2102 else if (mtype == float32x_type_node)
2103 return fcodef32x;
2104 else if (mtype == float64x_type_node)
2105 return fcodef64x;
2106 else if (mtype == float128x_type_node)
2107 return fcodef128x;
2108 else
2109 return END_BUILTINS;
2112 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2113 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2114 otherwise use the explicit declaration. If we can't do the conversion,
2115 return null. */
2117 static tree
2118 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2120 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2121 if (fcode2 == END_BUILTINS)
2122 return NULL_TREE;
2124 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2125 return NULL_TREE;
2127 return builtin_decl_explicit (fcode2);
2130 /* Like mathfn_built_in_1, but always use the implicit array. */
2132 tree
2133 mathfn_built_in (tree type, combined_fn fn)
2135 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2138 /* Like mathfn_built_in_1, but take a built_in_function and
2139 always use the implicit array. */
2141 tree
2142 mathfn_built_in (tree type, enum built_in_function fn)
2144 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
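/* Illustrative use (a sketch, not from this file): a caller holding a
   float operand that wants the matching sqrt builtin would write

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which selects BUILT_IN_SQRTF via mathfn_built_in_2 and returns its
   implicit declaration, or NULL_TREE if none is available.  */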
2147 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2148 return its code, otherwise return IFN_LAST. Note that this function
2149 only tests whether the function is defined in internals.def, not whether
2150 it is actually available on the target. */
2152 internal_fn
2153 associated_internal_fn (tree fndecl)
2155 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2156 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2157 switch (DECL_FUNCTION_CODE (fndecl))
2159 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2162 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2164 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2165 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2166 #include "internal-fn.def"
2168 CASE_FLT_FN (BUILT_IN_POW10):
2169 return IFN_EXP10;
2171 CASE_FLT_FN (BUILT_IN_DREM):
2172 return IFN_REMAINDER;
2174 CASE_FLT_FN (BUILT_IN_SCALBN):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2177 return IFN_LDEXP;
2178 return IFN_LAST;
2180 default:
2181 return IFN_LAST;
2185 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2186 on the current target by a call to an internal function, return the
2187 code of that internal function, otherwise return IFN_LAST. The caller
2188 is responsible for ensuring that any side-effects of the built-in
2189 call are dealt with correctly. E.g. if CALL sets errno, the caller
2190 must decide that the errno result isn't needed or make it available
2191 in some other way. */
2193 internal_fn
2194 replacement_internal_fn (gcall *call)
2196 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2198 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2199 if (ifn != IFN_LAST)
2201 tree_pair types = direct_internal_fn_types (ifn, call);
2202 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2203 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2204 return ifn;
2207 return IFN_LAST;
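/* Hedged example of the intent: for a GIMPLE call such as

     x = __builtin_sqrtf (y);

   replacement_internal_fn returns IFN_SQRT when the target can expand a
   direct square-root instruction for the call's types; per the comment
   above, the caller must still decide whether any errno side-effect of
   sqrtf can be ignored before substituting the internal function.  */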
2210 /* Expand a call to the builtin trinary math functions (fma).
2211 Return NULL_RTX if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's
2215 operands. */
2217 static rtx
2218 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2220 optab builtin_optab;
2221 rtx op0, op1, op2, result;
2222 rtx_insn *insns;
2223 tree fndecl = get_callee_fndecl (exp);
2224 tree arg0, arg1, arg2;
2225 machine_mode mode;
2227 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2228 return NULL_RTX;
2230 arg0 = CALL_EXPR_ARG (exp, 0);
2231 arg1 = CALL_EXPR_ARG (exp, 1);
2232 arg2 = CALL_EXPR_ARG (exp, 2);
2234 switch (DECL_FUNCTION_CODE (fndecl))
2236 CASE_FLT_FN (BUILT_IN_FMA):
2237 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2238 builtin_optab = fma_optab; break;
2239 default:
2240 gcc_unreachable ();
2243 /* Make a suitable register to place result in. */
2244 mode = TYPE_MODE (TREE_TYPE (exp));
2246 /* Before working hard, check whether the instruction is available. */
2247 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2248 return NULL_RTX;
2250 result = gen_reg_rtx (mode);
2252 /* Always stabilize the argument list. */
2253 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2254 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2255 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2257 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2258 op1 = expand_normal (arg1);
2259 op2 = expand_normal (arg2);
2261 start_sequence ();
2263 /* Compute into RESULT.
2264 Set RESULT to wherever the result comes back. */
2265 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2266 result, 0);
2268 /* If we were unable to expand via the builtin, stop the sequence
2269 (without outputting the insns) and call to the library function
2270 with the stabilized argument list. */
2271 if (result == 0)
2273 end_sequence ();
2274 return expand_call (exp, target, target == const0_rtx);
2277 /* Output the entire sequence. */
2278 insns = get_insns ();
2279 end_sequence ();
2280 emit_insn (insns);
2282 return result;
2285 /* Expand a call to the builtin sin and cos math functions.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
2288 function; if convenient, the result should be placed in TARGET.
2289 SUBTARGET may be used as the target for computing one of EXP's
2290 operands. */
2292 static rtx
2293 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2295 optab builtin_optab;
2296 rtx op0;
2297 rtx_insn *insns;
2298 tree fndecl = get_callee_fndecl (exp);
2299 machine_mode mode;
2300 tree arg;
2302 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2303 return NULL_RTX;
2305 arg = CALL_EXPR_ARG (exp, 0);
2307 switch (DECL_FUNCTION_CODE (fndecl))
2309 CASE_FLT_FN (BUILT_IN_SIN):
2310 CASE_FLT_FN (BUILT_IN_COS):
2311 builtin_optab = sincos_optab; break;
2312 default:
2313 gcc_unreachable ();
2316 /* Make a suitable register to place result in. */
2317 mode = TYPE_MODE (TREE_TYPE (exp));
2319 /* Check if the sincos insn is available, otherwise fall back
2320 to the sin or cos insn.  */
2321 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2322 switch (DECL_FUNCTION_CODE (fndecl))
2324 CASE_FLT_FN (BUILT_IN_SIN):
2325 builtin_optab = sin_optab; break;
2326 CASE_FLT_FN (BUILT_IN_COS):
2327 builtin_optab = cos_optab; break;
2328 default:
2329 gcc_unreachable ();
2332 /* Before working hard, check whether the instruction is available. */
2333 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2335 rtx result = gen_reg_rtx (mode);
2337 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2338 need to expand the argument again. This way, we will not perform
2339 side-effects more than once.  */
2340 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2342 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2344 start_sequence ();
2346 /* Compute into RESULT.
2347 Set RESULT to wherever the result comes back. */
2348 if (builtin_optab == sincos_optab)
2350 int ok;
2352 switch (DECL_FUNCTION_CODE (fndecl))
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2356 break;
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2359 break;
2360 default:
2361 gcc_unreachable ();
2363 gcc_assert (ok);
2365 else
2366 result = expand_unop (mode, builtin_optab, op0, result, 0);
2368 if (result != 0)
2370 /* Output the entire sequence. */
2371 insns = get_insns ();
2372 end_sequence ();
2373 emit_insn (insns);
2374 return result;
2377 /* If we were unable to expand via the builtin, stop the sequence
2378 (without outputting the insns) and call to the library function
2379 with the stabilized argument list. */
2380 end_sequence ();
2383 return expand_call (exp, target, target == const0_rtx);
2386 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2387 return an RTL instruction code that implements the functionality.
2388 If that isn't possible or available return CODE_FOR_nothing. */
2390 static enum insn_code
2391 interclass_mathfn_icode (tree arg, tree fndecl)
2393 bool errno_set = false;
2394 optab builtin_optab = unknown_optab;
2395 machine_mode mode;
2397 switch (DECL_FUNCTION_CODE (fndecl))
2399 CASE_FLT_FN (BUILT_IN_ILOGB):
2400 errno_set = true; builtin_optab = ilogb_optab; break;
2401 CASE_FLT_FN (BUILT_IN_ISINF):
2402 builtin_optab = isinf_optab; break;
2403 case BUILT_IN_ISNORMAL:
2404 case BUILT_IN_ISFINITE:
2405 CASE_FLT_FN (BUILT_IN_FINITE):
2406 case BUILT_IN_FINITED32:
2407 case BUILT_IN_FINITED64:
2408 case BUILT_IN_FINITED128:
2409 case BUILT_IN_ISINFD32:
2410 case BUILT_IN_ISINFD64:
2411 case BUILT_IN_ISINFD128:
2412 /* These builtins have no optabs (yet). */
2413 break;
2414 default:
2415 gcc_unreachable ();
2418 /* There's no easy way to detect the case we need to set EDOM. */
2419 if (flag_errno_math && errno_set)
2420 return CODE_FOR_nothing;
2422 /* Optab mode depends on the mode of the input argument. */
2423 mode = TYPE_MODE (TREE_TYPE (arg));
2425 if (builtin_optab)
2426 return optab_handler (builtin_optab, mode);
2427 return CODE_FOR_nothing;
2430 /* Expand a call to one of the builtin math functions that operate on
2431 a floating point argument and produce an integer result (ilogb, isinf,
2432 isnan, etc.).
2433 Return 0 if a normal call should be emitted rather than expanding the
2434 function in-line. EXP is the expression that is a call to the builtin
2435 function; if convenient, the result should be placed in TARGET. */
2437 static rtx
2438 expand_builtin_interclass_mathfn (tree exp, rtx target)
2440 enum insn_code icode = CODE_FOR_nothing;
2441 rtx op0;
2442 tree fndecl = get_callee_fndecl (exp);
2443 machine_mode mode;
2444 tree arg;
2446 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2447 return NULL_RTX;
2449 arg = CALL_EXPR_ARG (exp, 0);
2450 icode = interclass_mathfn_icode (arg, fndecl);
2451 mode = TYPE_MODE (TREE_TYPE (arg));
2453 if (icode != CODE_FOR_nothing)
2455 class expand_operand ops[1];
2456 rtx_insn *last = get_last_insn ();
2457 tree orig_arg = arg;
2459 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2460 need to expand the argument again. This way, we will not perform
2461 side-effects more than once.  */
2462 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2464 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466 if (mode != GET_MODE (op0))
2467 op0 = convert_to_mode (mode, op0, 0);
2469 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2470 if (maybe_legitimize_operands (icode, 0, 1, ops)
2471 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2472 return ops[0].value;
2474 delete_insns_since (last);
2475 CALL_EXPR_ARG (exp, 0) = orig_arg;
2478 return NULL_RTX;
2481 /* Expand a call to the builtin sincos math function.
2482 Return NULL_RTX if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2484 function. */
2486 static rtx
2487 expand_builtin_sincos (tree exp)
2489 rtx op0, op1, op2, target1, target2;
2490 machine_mode mode;
2491 tree arg, sinp, cosp;
2492 int result;
2493 location_t loc = EXPR_LOCATION (exp);
2494 tree alias_type, alias_off;
2496 if (!validate_arglist (exp, REAL_TYPE,
2497 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2498 return NULL_RTX;
2500 arg = CALL_EXPR_ARG (exp, 0);
2501 sinp = CALL_EXPR_ARG (exp, 1);
2502 cosp = CALL_EXPR_ARG (exp, 2);
2504 /* Make a suitable register to place result in. */
2505 mode = TYPE_MODE (TREE_TYPE (arg));
2507 /* Check if sincos insn is available, otherwise emit the call. */
2508 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2509 return NULL_RTX;
2511 target1 = gen_reg_rtx (mode);
2512 target2 = gen_reg_rtx (mode);
2514 op0 = expand_normal (arg);
2515 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2516 alias_off = build_int_cst (alias_type, 0);
2517 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2518 sinp, alias_off));
2519 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2520 cosp, alias_off));
2522 /* Compute into target1 and target2.
2523 Set TARGET to wherever the result comes back. */
2524 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2525 gcc_assert (result);
2527 /* Move target1 and target2 to the memory locations indicated
2528 by op1 and op2. */
2529 emit_move_insn (op1, target1);
2530 emit_move_insn (op2, target2);
2532 return const0_rtx;
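/* Hedged illustration: on a target providing the sincos optab, a call

     sincos (x, &s, &c);

   expands through the code above into a single two-output instruction
   computing both values, followed by stores of target1 and target2
   through the sinp and cosp pointers.  */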
2535 /* Expand a call to the internal cexpi builtin to the sincos math function.
2536 EXP is the expression that is a call to the builtin function; if convenient,
2537 the result should be placed in TARGET. */
2539 static rtx
2540 expand_builtin_cexpi (tree exp, rtx target)
2542 tree fndecl = get_callee_fndecl (exp);
2543 tree arg, type;
2544 machine_mode mode;
2545 rtx op0, op1, op2;
2546 location_t loc = EXPR_LOCATION (exp);
2548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2549 return NULL_RTX;
2551 arg = CALL_EXPR_ARG (exp, 0);
2552 type = TREE_TYPE (arg);
2553 mode = TYPE_MODE (TREE_TYPE (arg));
2555 /* Try expanding via a sincos optab, falling back to emitting a libcall
2556 to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2557 is only generated from sincos or cexp, or when either of them is available.  */
2558 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2560 op1 = gen_reg_rtx (mode);
2561 op2 = gen_reg_rtx (mode);
2563 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2565 /* Compute into op1 and op2. */
2566 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2568 else if (targetm.libc_has_function (function_sincos))
2570 tree call, fn = NULL_TREE;
2571 tree top1, top2;
2572 rtx op1a, op2a;
2574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2575 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2576 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2577 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2578 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2579 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2580 else
2581 gcc_unreachable ();
2583 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2584 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op1a = copy_addr_to_reg (XEXP (op1, 0));
2586 op2a = copy_addr_to_reg (XEXP (op2, 0));
2587 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2588 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2590 /* Make sure not to fold the sincos call again. */
2591 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2592 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2593 call, 3, arg, top1, top2));
2595 else
2597 tree call, fn = NULL_TREE, narg;
2598 tree ctype = build_complex_type (type);
2600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2601 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2603 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2604 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2605 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2606 else
2607 gcc_unreachable ();
2609 /* If we don't have a decl for cexp, create one.  This is the
2610 friendliest fallback if the user calls __builtin_cexpi on a
2611 target without full C99 function support.  */
2612 if (fn == NULL_TREE)
2614 tree fntype;
2615 const char *name = NULL;
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 name = "cexpf";
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 name = "cexp";
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 name = "cexpl";
2624 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2625 fn = build_fn_decl (name, fntype);
2628 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2629 build_real (type, dconst0), arg);
2631 /* Make sure not to fold the cexp call again. */
2632 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2633 return expand_expr (build_call_nary (ctype, call, 1, narg),
2634 target, VOIDmode, EXPAND_NORMAL);
2637 /* Now build the proper return type. */
2638 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2639 make_tree (TREE_TYPE (arg), op2),
2640 make_tree (TREE_TYPE (arg), op1)),
2641 target, VOIDmode, EXPAND_NORMAL);
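/* The lowering above relies on the identity cexpi (x) == cos (x) + i*sin (x):
   the sincos-optab path computes the two parts directly, the sincos libcall
   path stores them through temporaries, and the cexp fallback evaluates
   cexp (0.0 + x*i) instead.  */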
2644 /* Conveniently construct a function call expression. FNDECL names the
2645 function to be called, N is the number of arguments, and the "..."
2646 parameters are the argument expressions.  Unlike build_call_expr
2647 this doesn't fold the call, so it will always return a CALL_EXPR.  */
2649 static tree
2650 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2652 va_list ap;
2653 tree fntype = TREE_TYPE (fndecl);
2654 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2656 va_start (ap, n);
2657 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2658 va_end (ap);
2659 SET_EXPR_LOCATION (fn, loc);
2660 return fn;
2663 /* Expand a call to one of the builtin rounding functions gcc defines
2664 as an extension (lfloor and lceil). As these are gcc extensions we
2665 do not need to worry about setting errno to EDOM.
2666 If expanding via optab fails, lower the expression to (int) floor (x).
2667 EXP is the expression that is a call to the builtin function;
2668 if convenient, the result should be placed in TARGET. */
2670 static rtx
2671 expand_builtin_int_roundingfn (tree exp, rtx target)
2673 convert_optab builtin_optab;
2674 rtx op0, tmp;
2675 rtx_insn *insns;
2676 tree fndecl = get_callee_fndecl (exp);
2677 enum built_in_function fallback_fn;
2678 tree fallback_fndecl;
2679 machine_mode mode;
2680 tree arg;
2682 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2683 return NULL_RTX;
2685 arg = CALL_EXPR_ARG (exp, 0);
2687 switch (DECL_FUNCTION_CODE (fndecl))
2689 CASE_FLT_FN (BUILT_IN_ICEIL):
2690 CASE_FLT_FN (BUILT_IN_LCEIL):
2691 CASE_FLT_FN (BUILT_IN_LLCEIL):
2692 builtin_optab = lceil_optab;
2693 fallback_fn = BUILT_IN_CEIL;
2694 break;
2696 CASE_FLT_FN (BUILT_IN_IFLOOR):
2697 CASE_FLT_FN (BUILT_IN_LFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2699 builtin_optab = lfloor_optab;
2700 fallback_fn = BUILT_IN_FLOOR;
2701 break;
2703 default:
2704 gcc_unreachable ();
2707 /* Make a suitable register to place result in. */
2708 mode = TYPE_MODE (TREE_TYPE (exp));
2710 target = gen_reg_rtx (mode);
2712 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2713 need to expand the argument again. This way, we will not perform
2714 side-effects more than once.  */
2715 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2717 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2719 start_sequence ();
2721 /* Compute into TARGET. */
2722 if (expand_sfix_optab (target, op0, builtin_optab))
2724 /* Output the entire sequence. */
2725 insns = get_insns ();
2726 end_sequence ();
2727 emit_insn (insns);
2728 return target;
2731 /* If we were unable to expand via the builtin, stop the sequence
2732 (without outputting the insns). */
2733 end_sequence ();
2735 /* Fall back to floating point rounding optab. */
2736 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2738 /* For non-C99 targets we may end up without a fallback fndecl here
2739 if the user called __builtin_lfloor directly. In this case emit
2740 a call to the floor/ceil variants nevertheless. This should result
2741 in the best user experience on targets without full C99 support.  */
2742 if (fallback_fndecl == NULL_TREE)
2744 tree fntype;
2745 const char *name = NULL;
2747 switch (DECL_FUNCTION_CODE (fndecl))
2749 case BUILT_IN_ICEIL:
2750 case BUILT_IN_LCEIL:
2751 case BUILT_IN_LLCEIL:
2752 name = "ceil";
2753 break;
2754 case BUILT_IN_ICEILF:
2755 case BUILT_IN_LCEILF:
2756 case BUILT_IN_LLCEILF:
2757 name = "ceilf";
2758 break;
2759 case BUILT_IN_ICEILL:
2760 case BUILT_IN_LCEILL:
2761 case BUILT_IN_LLCEILL:
2762 name = "ceill";
2763 break;
2764 case BUILT_IN_IFLOOR:
2765 case BUILT_IN_LFLOOR:
2766 case BUILT_IN_LLFLOOR:
2767 name = "floor";
2768 break;
2769 case BUILT_IN_IFLOORF:
2770 case BUILT_IN_LFLOORF:
2771 case BUILT_IN_LLFLOORF:
2772 name = "floorf";
2773 break;
2774 case BUILT_IN_IFLOORL:
2775 case BUILT_IN_LFLOORL:
2776 case BUILT_IN_LLFLOORL:
2777 name = "floorl";
2778 break;
2779 default:
2780 gcc_unreachable ();
2783 fntype = build_function_type_list (TREE_TYPE (arg),
2784 TREE_TYPE (arg), NULL_TREE);
2785 fallback_fndecl = build_fn_decl (name, fntype);
2788 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2790 tmp = expand_normal (exp);
2791 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2793 /* Truncate the result of floating point optab to integer
2794 via expand_fix (). */
2795 target = gen_reg_rtx (mode);
2796 expand_fix (target, tmp, 0);
2798 return target;
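/* Worked example (hedged sketch): on a target without an lceil/lfloor
   pattern,

     long l = __builtin_lfloor (x);

   is lowered by the fallback path above into the equivalent of

     long l = (long) floor (x);

   i.e. a call to floor followed by expand_fix to truncate the result to
   the integer mode.  */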
2801 /* Expand a call to one of the builtin math functions doing integer
2802 conversion (lrint).
2803 Return 0 if a normal call should be emitted rather than expanding the
2804 function in-line. EXP is the expression that is a call to the builtin
2805 function; if convenient, the result should be placed in TARGET. */
2807 static rtx
2808 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2810 convert_optab builtin_optab;
2811 rtx op0;
2812 rtx_insn *insns;
2813 tree fndecl = get_callee_fndecl (exp);
2814 tree arg;
2815 machine_mode mode;
2816 enum built_in_function fallback_fn = BUILT_IN_NONE;
2818 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2819 return NULL_RTX;
2821 arg = CALL_EXPR_ARG (exp, 0);
2823 switch (DECL_FUNCTION_CODE (fndecl))
2825 CASE_FLT_FN (BUILT_IN_IRINT):
2826 fallback_fn = BUILT_IN_LRINT;
2827 gcc_fallthrough ();
2828 CASE_FLT_FN (BUILT_IN_LRINT):
2829 CASE_FLT_FN (BUILT_IN_LLRINT):
2830 builtin_optab = lrint_optab;
2831 break;
2833 CASE_FLT_FN (BUILT_IN_IROUND):
2834 fallback_fn = BUILT_IN_LROUND;
2835 gcc_fallthrough ();
2836 CASE_FLT_FN (BUILT_IN_LROUND):
2837 CASE_FLT_FN (BUILT_IN_LLROUND):
2838 builtin_optab = lround_optab;
2839 break;
2841 default:
2842 gcc_unreachable ();
2845 /* There's no easy way to detect the case we need to set EDOM. */
2846 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2847 return NULL_RTX;
2849 /* Make a suitable register to place result in. */
2850 mode = TYPE_MODE (TREE_TYPE (exp));
2852 /* There's no easy way to detect the case we need to set EDOM. */
2853 if (!flag_errno_math)
2855 rtx result = gen_reg_rtx (mode);
2857 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2858 need to expand the argument again. This way, we will not perform
2859 side-effects more than once.  */
2860 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2862 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2864 start_sequence ();
2866 if (expand_sfix_optab (result, op0, builtin_optab))
2868 /* Output the entire sequence. */
2869 insns = get_insns ();
2870 end_sequence ();
2871 emit_insn (insns);
2872 return result;
2875 /* If we were unable to expand via the builtin, stop the sequence
2876 (without outputting the insns) and call to the library function
2877 with the stabilized argument list. */
2878 end_sequence ();
2881 if (fallback_fn != BUILT_IN_NONE)
2883 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2884 targets, (int) round (x) should never be transformed into
2885 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2886 a call to lround in the hope that the target provides at least some
2887 C99 functions.  This should result in the best user experience on
2888 targets without full C99 support.  */
2889 tree fallback_fndecl = mathfn_built_in_1
2890 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2892 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2893 fallback_fndecl, 1, arg);
2895 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2896 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2897 return convert_to_mode (mode, target, 0);
2900 return expand_call (exp, target, target == const0_rtx);
2903 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2904 a normal call should be emitted rather than expanding the function
2905 in-line. EXP is the expression that is a call to the builtin
2906 function; if convenient, the result should be placed in TARGET. */
2908 static rtx
2909 expand_builtin_powi (tree exp, rtx target)
2911 tree arg0, arg1;
2912 rtx op0, op1;
2913 machine_mode mode;
2914 machine_mode mode2;
2916 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2917 return NULL_RTX;
2919 arg0 = CALL_EXPR_ARG (exp, 0);
2920 arg1 = CALL_EXPR_ARG (exp, 1);
2921 mode = TYPE_MODE (TREE_TYPE (exp));
2923 /* Emit a libcall to libgcc. */
2925 /* Mode of the 2nd argument must match that of an int. */
2926 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2928 if (target == NULL_RTX)
2929 target = gen_reg_rtx (mode);
2931 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2932 if (GET_MODE (op0) != mode)
2933 op0 = convert_to_mode (mode, op0, 0);
2934 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2935 if (GET_MODE (op1) != mode2)
2936 op1 = convert_to_mode (mode2, op1, 0);
2938 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2939 target, LCT_CONST, mode,
2940 op0, mode, op1, mode2);
2942 return target;
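/* Hedged illustration: there is no powi insn pattern, so

     double d = __builtin_powi (x, n);

   always becomes a libcall through optab_libfunc (powi_optab, DFmode),
   conventionally the libgcc helper __powidf2 for double, with N first
   converted to the mode of an int.  */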
2945 /* Expand expression EXP which is a call to the strlen builtin. Return
2946 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2947 try to get the result in TARGET, if convenient. */
2949 static rtx
2950 expand_builtin_strlen (tree exp, rtx target,
2951 machine_mode target_mode)
2953 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2954 return NULL_RTX;
2956 class expand_operand ops[4];
2957 rtx pat;
2958 tree len;
2959 tree src = CALL_EXPR_ARG (exp, 0);
2960 rtx src_reg;
2961 rtx_insn *before_strlen;
2962 machine_mode insn_mode;
2963 enum insn_code icode = CODE_FOR_nothing;
2964 unsigned int align;
2966 /* If the length can be computed at compile-time, return it. */
2967 len = c_strlen (src, 0);
2968 if (len)
2969 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2971 /* If the length can be computed at compile-time and is a constant
2972 integer, but there are side-effects in src, evaluate
2973 src for side-effects, then return len.
2974 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2975 can be optimized into: i++; x = 3; */
2976 len = c_strlen (src, 1);
2977 if (len && TREE_CODE (len) == INTEGER_CST)
2979 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2980 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2985 /* If SRC is not a pointer type, don't do this operation inline. */
2986 if (align == 0)
2987 return NULL_RTX;
2989 /* Bail out if we can't compute strlen in the right mode. */
2990 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2992 icode = optab_handler (strlen_optab, insn_mode);
2993 if (icode != CODE_FOR_nothing)
2994 break;
2996 if (insn_mode == VOIDmode)
2997 return NULL_RTX;
2999 /* Make a place to hold the source address. We will not expand
3000 the actual source until we are sure that the expansion will
3001 not fail -- there are trees that cannot be expanded twice. */
3002 src_reg = gen_reg_rtx (Pmode);
3004 /* Mark the beginning of the strlen sequence so we can emit the
3005 source operand later. */
3006 before_strlen = get_last_insn ();
3008 create_output_operand (&ops[0], target, insn_mode);
3009 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3010 create_integer_operand (&ops[2], 0);
3011 create_integer_operand (&ops[3], align);
3012 if (!maybe_expand_insn (icode, 4, ops))
3013 return NULL_RTX;
3015 /* Check to see if the argument was declared attribute nonstring
3016 and if so, issue a warning since at this point it's not known
3017 to be nul-terminated. */
3018 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3020 /* Now that we are assured of success, expand the source. */
3021 start_sequence ();
3022 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3023 if (pat != src_reg)
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat) != Pmode)
3027 pat = convert_to_mode (Pmode, pat,
3028 POINTERS_EXTEND_UNSIGNED);
3029 #endif
3030 emit_move_insn (src_reg, pat);
3032 pat = get_insns ();
3033 end_sequence ();
3035 if (before_strlen)
3036 emit_insn_after (pat, before_strlen);
3037 else
3038 emit_insn_before (pat, get_insns ());
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops[0].value) == target_mode)
3042 target = ops[0].value;
3043 else if (target != 0)
3044 convert_move (target, ops[0].value, 0);
3045 else
3046 target = convert_to_mode (target_mode, ops[0].value, 0);
3048 return target;
3051 /* Expand call EXP to the strnlen built-in, returning the result
3052 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3054 static rtx
3055 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3057 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3058 return NULL_RTX;
3060 tree src = CALL_EXPR_ARG (exp, 0);
3061 tree bound = CALL_EXPR_ARG (exp, 1);
3063 if (!bound)
3064 return NULL_RTX;
3066 location_t loc = UNKNOWN_LOCATION;
3067 if (EXPR_HAS_LOCATION (exp))
3068 loc = EXPR_LOCATION (exp);
3070 tree maxobjsize = max_object_size ();
3071 tree func = get_callee_fndecl (exp);
3073 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3074 so these conversions aren't necessary. */
3075 c_strlen_data lendata = { };
3076 tree len = c_strlen (src, 0, &lendata, 1);
3077 if (len)
3078 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3080 if (TREE_CODE (bound) == INTEGER_CST)
3082 if (!TREE_NO_WARNING (exp)
3083 && tree_int_cst_lt (maxobjsize, bound)
3084 && warning_at (loc, OPT_Wstringop_overflow_,
3085 "%K%qD specified bound %E "
3086 "exceeds maximum object size %E",
3087 exp, func, bound, maxobjsize))
3088 TREE_NO_WARNING (exp) = true;
3090 bool exact = true;
3091 if (!len || TREE_CODE (len) != INTEGER_CST)
3093 /* Clear EXACT if LEN may be less than SRC suggests,
3094 such as in
3095 strnlen (&a[i], sizeof a)
3096 where the value of i is unknown. Unless i's value is
3097 zero, the call is unsafe because the bound is greater. */
3098 lendata.decl = unterminated_array (src, &len, &exact);
3099 if (!lendata.decl)
3100 return NULL_RTX;
3103 if (lendata.decl
3104 && !TREE_NO_WARNING (exp)
3105 && ((tree_int_cst_lt (len, bound))
3106 || !exact))
3108 location_t warnloc
3109 = expansion_point_location_if_in_system_header (loc);
3111 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3112 exact
3113 ? G_("%K%qD specified bound %E exceeds the size %E "
3114 "of unterminated array")
3115 : G_("%K%qD specified bound %E may exceed the size "
3116 "of at most %E of unterminated array"),
3117 exp, func, bound, len))
3119 inform (DECL_SOURCE_LOCATION (lendata.decl),
3120 "referenced argument declared here");
3121 TREE_NO_WARNING (exp) = true;
3122 return NULL_RTX;
3126 if (!len)
3127 return NULL_RTX;
3129 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3130 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 if (TREE_CODE (bound) != SSA_NAME)
3134 return NULL_RTX;
3136 wide_int min, max;
3137 enum value_range_kind rng = get_range_info (bound, &min, &max);
3138 if (rng != VR_RANGE)
3139 return NULL_RTX;
3141 if (!TREE_NO_WARNING (exp)
3142 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3143 && warning_at (loc, OPT_Wstringop_overflow_,
3144 "%K%qD specified bound [%wu, %wu] "
3145 "exceeds maximum object size %E",
3146 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3147 TREE_NO_WARNING (exp) = true;
3149 bool exact = true;
3150 if (!len || TREE_CODE (len) != INTEGER_CST)
3152 lendata.decl = unterminated_array (src, &len, &exact);
3153 if (!lendata.decl)
3154 return NULL_RTX;
3157 if (lendata.decl
3158 && !TREE_NO_WARNING (exp)
3159 && (wi::ltu_p (wi::to_wide (len), min)
3160 || !exact))
3162 location_t warnloc
3163 = expansion_point_location_if_in_system_header (loc);
3165 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3166 exact
3167 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3168 "the size %E of unterminated array")
3169 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3170 "the size of at most %E of unterminated array"),
3171 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3173 inform (DECL_SOURCE_LOCATION (lendata.decl),
3174 "referenced argument declared here");
3175 TREE_NO_WARNING (exp) = true;
3179 if (lendata.decl)
3180 return NULL_RTX;
3182 if (wi::gtu_p (min, wi::to_wide (len)))
3183 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3185 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3186 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
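/* Worked example (hedged): with a known string and a constant bound,

     n = strnlen ("hello", 3);

   the code above obtains len == 5 from c_strlen, finds the bound within
   the object-size limits, and expands MIN_EXPR (5, 3), so the call folds
   to 3 with no libcall.  */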
3189 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3190 bytes from constant string DATA + OFFSET and return it as a target
3191 constant.  */
3193 static rtx
3194 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3195 scalar_int_mode mode)
3197 const char *str = (const char *) data;
3199 gcc_assert (offset >= 0
3200 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3201 <= strlen (str) + 1));
3203 return c_readstr (str + offset, mode);
3206 /* LEN specifies the length of the block for a memcpy/memset operation.
3207 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3208 In some cases we can make a very likely guess about the max size, which
3209 we then store into PROBABLE_MAX_SIZE.  */
3211 static void
3212 determine_block_size (tree len, rtx len_rtx,
3213 unsigned HOST_WIDE_INT *min_size,
3214 unsigned HOST_WIDE_INT *max_size,
3215 unsigned HOST_WIDE_INT *probable_max_size)
3217 if (CONST_INT_P (len_rtx))
3219 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3220 return;
3222 else
3224 wide_int min, max;
3225 enum value_range_kind range_type = VR_UNDEFINED;
3227 /* Determine bounds from the type. */
3228 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3229 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3230 else
3231 *min_size = 0;
3232 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3233 *probable_max_size = *max_size
3234 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3235 else
3236 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3238 if (TREE_CODE (len) == SSA_NAME)
3239 range_type = get_range_info (len, &min, &max);
3240 if (range_type == VR_RANGE)
3242 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3243 *min_size = min.to_uhwi ();
3244 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3245 *probable_max_size = *max_size = max.to_uhwi ();
3247 else if (range_type == VR_ANTI_RANGE)
3249 /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3250 if (min == 0)
3252 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3253 *min_size = max.to_uhwi () + 1;
3255 /* Code like
3257 int n;
3258 if (n < 100)
3259 memcpy (a, b, n)
3261 produces an anti-range allowing negative values of N.  We can
3262 still use that information and guess that N is not negative.  */
3264 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3265 *probable_max_size = min.to_uhwi () - 1;
3268 gcc_checking_assert (*max_size <=
3269 (unsigned HOST_WIDE_INT)
3270 GET_MODE_MASK (GET_MODE (len_rtx)));
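/* Hedged illustration with hypothetical variables: for

     if (n >= 4 && n <= 16)
       memcpy (d, s, n);

   value-range information on N gives VR_RANGE [4, 16], so the code above
   sets *min_size to 4 and *max_size == *probable_max_size to 16, letting
   the block-move expander choose a strategy for small, bounded copies.  */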
3273 /* Try to verify that the sizes and lengths of the arguments to a string
3274 manipulation function given by EXP are within valid bounds and that
3275 the operation does not lead to buffer overflow or read past the end.
3276 Arguments other than EXP may be null. When non-null, the arguments
3277 have the following meaning:
3278 DST is the destination of a copy call or NULL otherwise.
3279 SRC is the source of a copy call or NULL otherwise.
3280 DSTWRITE is the number of bytes written into the destination obtained
3281 from the user-supplied size argument to the function (such as in
3282 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3283 MAXREAD is the user-supplied bound on the length of the source sequence
3284 (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3285 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3286 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3287 expression EXP is a string function call (as opposed to a memory call
3288 like memcpy). As an exception, SRCSTR can also be an integer denoting
3289 the precomputed size of the source string or object (for functions like
3290 memcpy).
3291 DSTSIZE is the size of the destination object specified by the last
3292 argument to the _chk builtins, typically resulting from the expansion
3293 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3294 DSTSIZE).
3296 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3297 SIZE_MAX.
3299 If the call is successfully verified as safe return true, otherwise
3300 return false. */
3302 static bool
3303 check_access (tree exp, tree, tree, tree dstwrite,
3304 tree maxread, tree srcstr, tree dstsize)
3306 int opt = OPT_Wstringop_overflow_;
3308 /* The size of the largest object is half the address space, or
3309 PTRDIFF_MAX. (This is way too permissive.) */
3310 tree maxobjsize = max_object_size ();
3312 /* Either the length of the source string for string functions or
3313 the size of the source object for raw memory functions. */
3314 tree slen = NULL_TREE;
3316 tree range[2] = { NULL_TREE, NULL_TREE };
3318 /* Set to true when the exact number of bytes written by a string
3319 function like strcpy is not known and the only thing that is
3320 known is that it must be at least one (for the terminating nul). */
3321 bool at_least_one = false;
3322 if (srcstr)
3324 /* SRCSTR is normally a pointer to a string but as a special case
3325 it can be an integer denoting the length of a string.  */
3326 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3328 /* Try to determine the range of lengths the source string
3329 refers to. If it can be determined and is less than
3330 the upper bound given by MAXREAD add one to it for
3331 the terminating nul. Otherwise, set it to one for
3332 the same reason, or to MAXREAD as appropriate. */
3333 c_strlen_data lendata = { };
3334 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3335 range[0] = lendata.minlen;
3336 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3337 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3339 if (maxread && tree_int_cst_le (maxread, range[0]))
3340 range[0] = range[1] = maxread;
3341 else
3342 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3343 range[0], size_one_node);
3345 if (maxread && tree_int_cst_le (maxread, range[1]))
3346 range[1] = maxread;
3347 else if (!integer_all_onesp (range[1]))
3348 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3349 range[1], size_one_node);
3351 slen = range[0];
3353 else
3355 at_least_one = true;
3356 slen = size_one_node;
3359 else
3360 slen = srcstr;
3363 if (!dstwrite && !maxread)
3365 /* When the only available piece of data is the object size
3366 there is nothing to do. */
3367 if (!slen)
3368 return true;
3370 /* Otherwise, when the length of the source sequence is known
3371 (as with strlen), set DSTWRITE to it. */
3372 if (!range[0])
3373 dstwrite = slen;
3376 if (!dstsize)
3377 dstsize = maxobjsize;
3379 if (dstwrite)
3380 get_size_range (dstwrite, range);
3382 tree func = get_callee_fndecl (exp);
3384 /* First check the number of bytes to be written against the maximum
3385 object size. */
3386 if (range[0]
3387 && TREE_CODE (range[0]) == INTEGER_CST
3388 && tree_int_cst_lt (maxobjsize, range[0]))
3390 if (TREE_NO_WARNING (exp))
3391 return false;
3393 location_t loc = tree_nonartificial_location (exp);
3394 loc = expansion_point_location_if_in_system_header (loc);
3396 bool warned;
3397 if (range[0] == range[1])
3398 warned = warning_at (loc, opt,
3399 "%K%qD specified size %E "
3400 "exceeds maximum object size %E",
3401 exp, func, range[0], maxobjsize);
3402 else
3403 warned = warning_at (loc, opt,
3404 "%K%qD specified size between %E and %E "
3405 "exceeds maximum object size %E",
3406 exp, func,
3407 range[0], range[1], maxobjsize);
3408 if (warned)
3409 TREE_NO_WARNING (exp) = true;
3411 return false;
3414 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3415 constant, and in range of unsigned HOST_WIDE_INT. */
3416 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3418 /* Next check the number of bytes to be written against the destination
3419 object size. */
3420 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3422 if (range[0]
3423 && TREE_CODE (range[0]) == INTEGER_CST
3424 && ((tree_fits_uhwi_p (dstsize)
3425 && tree_int_cst_lt (dstsize, range[0]))
3426 || (dstwrite
3427 && tree_fits_uhwi_p (dstwrite)
3428 && tree_int_cst_lt (dstwrite, range[0]))))
3430 if (TREE_NO_WARNING (exp))
3431 return false;
3433 location_t loc = tree_nonartificial_location (exp);
3434 loc = expansion_point_location_if_in_system_header (loc);
3436 if (dstwrite == slen && at_least_one)
3438 /* This is a call to strcpy with a destination of 0 size
3439 and a source of unknown length. The call will write
3440 at least one byte past the end of the destination. */
3441 warning_at (loc, opt,
3442 "%K%qD writing %E or more bytes into a region "
3443 "of size %E overflows the destination",
3444 exp, func, range[0], dstsize);
3446 else if (tree_int_cst_equal (range[0], range[1]))
3447 warning_n (loc, opt, tree_to_uhwi (range[0]),
3448 "%K%qD writing %E byte into a region "
3449 "of size %E overflows the destination",
3450 "%K%qD writing %E bytes into a region "
3451 "of size %E overflows the destination",
3452 exp, func, range[0], dstsize);
3453 else if (tree_int_cst_sign_bit (range[1]))
3455 /* Avoid printing the upper bound if it's invalid. */
3456 warning_at (loc, opt,
3457 "%K%qD writing %E or more bytes into a region "
3458 "of size %E overflows the destination",
3459 exp, func, range[0], dstsize);
3461 else
3462 warning_at (loc, opt,
3463 "%K%qD writing between %E and %E bytes into "
3464 "a region of size %E overflows the destination",
3465 exp, func, range[0], range[1],
3466 dstsize);
3468 /* Return failure when an overflow has been detected. */
3469 return false;
3473 /* Check the maximum length of the source sequence against the size
3474 of the destination object if known, or against the maximum size
3475 of an object. */
3476 if (maxread)
3478 get_size_range (maxread, range);
3479 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3481 location_t loc = tree_nonartificial_location (exp);
3482 loc = expansion_point_location_if_in_system_header (loc);
3484 if (tree_int_cst_lt (maxobjsize, range[0]))
3486 if (TREE_NO_WARNING (exp))
3487 return false;
3489 /* Warn about crazy big sizes first since that's more
3490 likely to be meaningful than saying that the bound
3491 is greater than the object size if both are big. */
3492 if (range[0] == range[1])
3493 warning_at (loc, opt,
3494 "%K%qD specified bound %E "
3495 "exceeds maximum object size %E",
3496 exp, func,
3497 range[0], maxobjsize);
3498 else
3499 warning_at (loc, opt,
3500 "%K%qD specified bound between %E and %E "
3501 "exceeds maximum object size %E",
3502 exp, func,
3503 range[0], range[1], maxobjsize);
3505 return false;
3508 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3510 if (TREE_NO_WARNING (exp))
3511 return false;
3513 if (tree_int_cst_equal (range[0], range[1]))
3514 warning_at (loc, opt,
3515 "%K%qD specified bound %E "
3516 "exceeds destination size %E",
3517 exp, func,
3518 range[0], dstsize);
3519 else
3520 warning_at (loc, opt,
3521 "%K%qD specified bound between %E and %E "
3522 "exceeds destination size %E",
3523 exp, func,
3524 range[0], range[1], dstsize);
3525 return false;
3530 /* Check for reading past the end of SRC. */
3531 if (slen
3532 && slen == srcstr
3533 && dstwrite && range[0]
3534 && tree_int_cst_lt (slen, range[0]))
3536 if (TREE_NO_WARNING (exp))
3537 return false;
3539 location_t loc = tree_nonartificial_location (exp);
3541 if (tree_int_cst_equal (range[0], range[1]))
3542 warning_n (loc, opt, tree_to_uhwi (range[0]),
3543 "%K%qD reading %E byte from a region of size %E",
3544 "%K%qD reading %E bytes from a region of size %E",
3545 exp, func, range[0], slen);
3546 else if (tree_int_cst_sign_bit (range[1]))
3548 /* Avoid printing the upper bound if it's invalid. */
3549 warning_at (loc, opt,
3550 "%K%qD reading %E or more bytes from a region "
3551 "of size %E",
3552 exp, func, range[0], slen);
3554 else
3555 warning_at (loc, opt,
3556 "%K%qD reading between %E and %E bytes from a region "
3557 "of size %E",
3558 exp, func, range[0], range[1], slen);
3559 return false;
3562 return true;
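/* Illustrative example (editor's sketch, not part of the original
   source): for

     char d[4];
     void f (const char *s) { strncpy (d, s, 8); }

   DSTWRITE is 8 and DSTSIZE is 4, so the function emits
   "writing 8 bytes into a region of size 4 overflows the destination"
   under -Wstringop-overflow and returns false. */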
3565 /* Helper to compute the size of the object referenced by the DEST
3566 expression which must have pointer type, using Object Size type
3567 OSTYPE (only the least significant 2 bits are used). Return
3568 an estimate of the size of the object if successful or NULL when
3569 the size cannot be determined. When the referenced object involves
3570 a non-constant offset in some range the returned value represents
3571 the largest size given the smallest non-negative offset in the
3572 range. If nonnull, set *PDECL to the decl of the referenced
3573 subobject if it can be determined, or to null otherwise.
3574 The function is intended for diagnostics and should not be used
3575 to influence code generation or optimization. */
3577 tree
3578 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */)
3580 tree dummy = NULL_TREE;
3581 if (!pdecl)
3582 pdecl = &dummy;
3584 unsigned HOST_WIDE_INT size;
3586 /* Only the two least significant bits are meaningful. */
3587 ostype &= 3;
3589 if (compute_builtin_object_size (dest, ostype, &size, pdecl))
3590 return build_int_cst (sizetype, size);
3592 if (TREE_CODE (dest) == SSA_NAME)
3594 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3595 if (!is_gimple_assign (stmt))
3596 return NULL_TREE;
3598 dest = gimple_assign_rhs1 (stmt);
3600 tree_code code = gimple_assign_rhs_code (stmt);
3601 if (code == POINTER_PLUS_EXPR)
3603 /* compute_builtin_object_size fails for addresses with
3604 non-constant offsets. Try to determine the range of
3605 such an offset here and use it to adjust the constant
3606 size. */
3607 tree off = gimple_assign_rhs2 (stmt);
3608 if (TREE_CODE (off) == INTEGER_CST)
3610 if (tree size = compute_objsize (dest, ostype, pdecl))
3612 wide_int wioff = wi::to_wide (off);
3613 wide_int wisiz = wi::to_wide (size);
3615 /* Ignore negative offsets for now. For others,
3616 use the lower bound as the most optimistic
3617 estimate of the (remaining) size. */
3618 if (wi::sign_mask (wioff))
3620 else if (wi::ltu_p (wioff, wisiz))
3621 return wide_int_to_tree (TREE_TYPE (size),
3622 wi::sub (wisiz, wioff));
3623 else
3624 return size_zero_node;
3627 else if (TREE_CODE (off) == SSA_NAME
3628 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3630 wide_int min, max;
3631 enum value_range_kind rng = get_range_info (off, &min, &max);
3633 if (rng == VR_RANGE)
3635 if (tree size = compute_objsize (dest, ostype, pdecl))
3637 wide_int wisiz = wi::to_wide (size);
3639 /* Ignore negative offsets for now. For others,
3640 use the lower bound as the most optimistic
3641 estimate of the (remaining) size. */
3642 if (wi::sign_mask (min)
3643 || wi::sign_mask (max))
3645 else if (wi::ltu_p (min, wisiz))
3646 return wide_int_to_tree (TREE_TYPE (size),
3647 wi::sub (wisiz, min));
3648 else
3649 return size_zero_node;
3654 else if (code != ADDR_EXPR)
3655 return NULL_TREE;
3658 /* Unless computing the largest size (for memcpy and other raw memory
3659 functions), try to determine the size of the object from its type. */
3660 if (!ostype)
3661 return NULL_TREE;
3663 if (TREE_CODE (dest) == ARRAY_REF
3664 || TREE_CODE (dest) == MEM_REF)
3666 tree ref = TREE_OPERAND (dest, 0);
3667 tree off = TREE_OPERAND (dest, 1);
3668 if (tree size = compute_objsize (ref, ostype, pdecl))
3670 /* If the declaration of the destination object is known
3671 to have zero size, return zero. */
3672 if (integer_zerop (size))
3673 return integer_zero_node;
3675 if (TREE_CODE (off) != INTEGER_CST
3676 || TREE_CODE (size) != INTEGER_CST)
3677 return NULL_TREE;
3679 if (TREE_CODE (dest) == ARRAY_REF)
3681 tree eltype = TREE_TYPE (dest);
3682 tree tpsize = TYPE_SIZE_UNIT (eltype);
3683 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
3684 off = fold_build2 (MULT_EXPR, size_type_node, off, tpsize);
3685 else
3686 return NULL_TREE;
3689 if (tree_int_cst_lt (off, size))
3690 return fold_build2 (MINUS_EXPR, size_type_node, size, off);
3691 return integer_zero_node;
3694 return NULL_TREE;
3697 if (TREE_CODE (dest) == COMPONENT_REF)
3699 *pdecl = TREE_OPERAND (dest, 1);
3700 return component_ref_size (dest);
3703 if (TREE_CODE (dest) != ADDR_EXPR)
3704 return NULL_TREE;
3706 tree ref = TREE_OPERAND (dest, 0);
3707 if (DECL_P (ref))
3709 *pdecl = ref;
3710 return DECL_SIZE_UNIT (ref);
3713 tree type = TREE_TYPE (dest);
3714 if (TREE_CODE (type) == POINTER_TYPE)
3715 type = TREE_TYPE (type);
3717 type = TYPE_MAIN_VARIANT (type);
3719 if (TREE_CODE (type) == ARRAY_TYPE
3720 && !array_at_struct_end_p (ref))
3722 if (tree size = TYPE_SIZE_UNIT (type))
3723 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3726 return NULL_TREE;
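/* Illustrative example (editor's sketch): given

     char a[8];
     char *f (unsigned i) { return a + (i & 3); }   // offset in [0, 3]

   the POINTER_PLUS_EXPR case above uses the smallest offset, 0, and
   returns 8; a constant offset such as a + 6 would instead yield 2,
   and a + 9 would yield size_zero_node. */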
3729 /* Helper to determine and check the sizes of the source and the destination
3730 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3731 call expression, DEST is the destination argument, SRC is the source
3732 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3733 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3734 (no overflow or invalid sizes), false otherwise. */
3736 static bool
3737 check_memop_access (tree exp, tree dest, tree src, tree size)
3739 /* For functions like memset and memcpy that operate on raw memory
3740 try to determine the size of the largest source and destination
3741 object using type-0 Object Size regardless of the object size
3742 type specified by the option. */
3743 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3744 tree dstsize = compute_objsize (dest, 0);
3746 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3747 srcsize, dstsize);
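/* For example (editor's illustration): with

     char buf[4];
     memset (buf, 0, 8);

   compute_objsize (buf, 0) yields 4, and check_access then diagnoses
   the call for writing 8 bytes into a region of size 4. */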
3750 /* Validate memchr arguments without performing any expansion.
3751 Return NULL_RTX. */
3753 static rtx
3754 expand_builtin_memchr (tree exp, rtx)
3756 if (!validate_arglist (exp,
3757 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3758 return NULL_RTX;
3760 tree arg1 = CALL_EXPR_ARG (exp, 0);
3761 tree len = CALL_EXPR_ARG (exp, 2);
3763 /* Diagnose calls where the specified length exceeds the size
3764 of the object. */
3765 if (warn_stringop_overflow)
3767 tree size = compute_objsize (arg1, 0);
3768 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3769 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3772 return NULL_RTX;
3775 /* Expand a call EXP to the memcpy builtin.
3776 Return NULL_RTX if we failed; the caller should emit a normal call,
3777 otherwise try to get the result in TARGET, if convenient (and in
3778 mode MODE if that's convenient). */
3780 static rtx
3781 expand_builtin_memcpy (tree exp, rtx target)
3783 if (!validate_arglist (exp,
3784 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3785 return NULL_RTX;
3787 tree dest = CALL_EXPR_ARG (exp, 0);
3788 tree src = CALL_EXPR_ARG (exp, 1);
3789 tree len = CALL_EXPR_ARG (exp, 2);
3791 check_memop_access (exp, dest, src, len);
3793 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3794 /*retmode=*/ RETURN_BEGIN, false);
3797 /* Expand a call EXP to the memmove builtin. Return NULL_RTX if we
3798 failed; the caller should emit a normal call. */
3800 static rtx
3801 expand_builtin_memmove (tree exp, rtx target)
3803 if (!validate_arglist (exp,
3804 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3805 return NULL_RTX;
3807 tree dest = CALL_EXPR_ARG (exp, 0);
3808 tree src = CALL_EXPR_ARG (exp, 1);
3809 tree len = CALL_EXPR_ARG (exp, 2);
3811 check_memop_access (exp, dest, src, len);
3813 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3814 /*retmode=*/ RETURN_BEGIN, true);
3817 /* Expand a call EXP to the mempcpy builtin.
3818 Return NULL_RTX if we failed; the caller should emit a normal call,
3819 otherwise try to get the result in TARGET, if convenient (and in
3820 mode MODE if that's convenient). */
3822 static rtx
3823 expand_builtin_mempcpy (tree exp, rtx target)
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
3831 tree len = CALL_EXPR_ARG (exp, 2);
3833 /* Policy does not generally allow using compute_objsize (which
3834 is used internally by check_memop_access) to change code generation
3835 or drive optimization decisions.
3837 In this instance it is safe because the code we generate has
3838 the same semantics regardless of the return value of
3839 check_memop_access. Exactly the same amount of data is copied
3840 and the return value is exactly the same in both cases.
3842 Furthermore, check_memop_access always uses mode 0 for the call to
3843 compute_objsize, so the imprecise nature of compute_objsize is
3844 avoided. */
3846 /* Avoid expanding mempcpy into memcpy when the call is determined
3847 to overflow the buffer. This also prevents the same overflow
3848 from being diagnosed again when expanding memcpy. */
3849 if (!check_memop_access (exp, dest, src, len))
3850 return NULL_RTX;
3852 return expand_builtin_mempcpy_args (dest, src, len,
3853 target, exp, /*retmode=*/ RETURN_END);
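/* In effect (editor's note): mempcpy (d, s, n) computes the same
   result as (char *) memcpy (d, s, n) + n, which is exactly what the
   RETURN_END mode requests from the copy expander below. */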
3856 /* Helper function to do the actual work of expanding the memory copy
3857 family of functions (memcpy, mempcpy, stpcpy). The expansion should
3858 assign LEN bytes of memory from SRC to DEST and assign the result to
3859 TARGET if convenient. The return value is based on the RETMODE argument. */
3861 static rtx
3862 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3863 rtx target, tree exp, memop_ret retmode,
3864 bool might_overlap)
3866 const char *src_str;
3867 unsigned int src_align = get_pointer_alignment (src);
3868 unsigned int dest_align = get_pointer_alignment (dest);
3869 rtx dest_mem, src_mem, dest_addr, len_rtx;
3870 HOST_WIDE_INT expected_size = -1;
3871 unsigned int expected_align = 0;
3872 unsigned HOST_WIDE_INT min_size;
3873 unsigned HOST_WIDE_INT max_size;
3874 unsigned HOST_WIDE_INT probable_max_size;
3876 bool is_move_done;
3878 /* If DEST is not a pointer type, call the normal function. */
3879 if (dest_align == 0)
3880 return NULL_RTX;
3882 /* If SRC is not a pointer type, don't do this
3883 operation in-line. */
3884 if (src_align == 0)
3885 return NULL_RTX;
3887 if (currently_expanding_gimple_stmt)
3888 stringop_block_profile (currently_expanding_gimple_stmt,
3889 &expected_align, &expected_size);
3891 if (expected_align < dest_align)
3892 expected_align = dest_align;
3893 dest_mem = get_memory_rtx (dest, len);
3894 set_mem_align (dest_mem, dest_align);
3895 len_rtx = expand_normal (len);
3896 determine_block_size (len, len_rtx, &min_size, &max_size,
3897 &probable_max_size);
3898 src_str = c_getstr (src);
3900 /* If SRC is a string constant and block move would be done by
3901 pieces, we can avoid loading the string from memory and only
3902 store the computed constants. This works in the overlap
3903 (memmove) case as well because store_by_pieces just generates a
3904 series of stores of constants from the string constant returned
3905 by c_getstr(). */
3906 if (src_str
3907 && CONST_INT_P (len_rtx)
3908 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3909 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3910 CONST_CAST (char *, src_str),
3911 dest_align, false))
3913 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3914 builtin_memcpy_read_str,
3915 CONST_CAST (char *, src_str),
3916 dest_align, false, retmode);
3917 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3918 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3919 return dest_mem;
3922 src_mem = get_memory_rtx (src, len);
3923 set_mem_align (src_mem, src_align);
3925 /* Copy word part most expediently. */
3926 enum block_op_methods method = BLOCK_OP_NORMAL;
3927 if (CALL_EXPR_TAILCALL (exp)
3928 && (retmode == RETURN_BEGIN || target == const0_rtx))
3929 method = BLOCK_OP_TAILCALL;
3930 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3931 && retmode == RETURN_END
3932 && !might_overlap
3933 && target != const0_rtx);
3934 if (use_mempcpy_call)
3935 method = BLOCK_OP_NO_LIBCALL_RET;
3936 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3937 expected_align, expected_size,
3938 min_size, max_size, probable_max_size,
3939 use_mempcpy_call, &is_move_done, might_overlap);
3941 /* Bail out when a mempcpy call would be expanded as a libcall and
3942 the target provides a fast implementation of the mempcpy
3943 routine. */
3944 if (!is_move_done)
3945 return NULL_RTX;
3947 if (dest_addr == pc_rtx)
3948 return NULL_RTX;
3950 if (dest_addr == 0)
3952 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3953 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3956 if (retmode != RETURN_BEGIN && target != const0_rtx)
3958 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3959 /* stpcpy returns a pointer to the terminating nul, hence minus one. */
3960 if (retmode == RETURN_END_MINUS_ONE)
3961 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3964 return dest_addr;
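/* Summary (editor's note) of the RETMODE handling above:
   RETURN_BEGIN yields DEST (memcpy), RETURN_END yields DEST + LEN
   (mempcpy), and RETURN_END_MINUS_ONE yields DEST + LEN - 1 (stpcpy,
   whose result points at the terminating nul it copied). */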
3967 static rtx
3968 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3969 rtx target, tree orig_exp, memop_ret retmode)
3971 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3972 retmode, false);
3975 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3976 we failed; the caller should emit a normal call, otherwise try to
3977 get the result in TARGET, if convenient.
3978 Return value is based on RETMODE argument. */
3980 static rtx
3981 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3983 class expand_operand ops[3];
3984 rtx dest_mem;
3985 rtx src_mem;
3987 if (!targetm.have_movstr ())
3988 return NULL_RTX;
3990 dest_mem = get_memory_rtx (dest, NULL);
3991 src_mem = get_memory_rtx (src, NULL);
3992 if (retmode == RETURN_BEGIN)
3994 target = force_reg (Pmode, XEXP (dest_mem, 0));
3995 dest_mem = replace_equiv_address (dest_mem, target);
3998 create_output_operand (&ops[0],
3999 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4000 create_fixed_operand (&ops[1], dest_mem);
4001 create_fixed_operand (&ops[2], src_mem);
4002 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4003 return NULL_RTX;
4005 if (retmode != RETURN_BEGIN && target != const0_rtx)
4007 target = ops[0].value;
4008 /* movstr is supposed to set end to the address of the NUL
4009 terminator. If the caller requested a mempcpy-like return value,
4010 adjust it. */
4011 if (retmode == RETURN_END)
4013 rtx tem = plus_constant (GET_MODE (target),
4014 gen_lowpart (GET_MODE (target), target), 1);
4015 emit_move_insn (target, force_operand (tem, NULL_RTX));
4018 return target;
4021 /* Do some very basic size validation of a call to the strcat builtin
4022 given by EXP. Return NULL_RTX to have the built-in expand to a call
4023 to the library function. */
4025 static rtx
4026 expand_builtin_strcat (tree exp, rtx)
4028 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4029 || !warn_stringop_overflow)
4030 return NULL_RTX;
4032 tree dest = CALL_EXPR_ARG (exp, 0);
4033 tree src = CALL_EXPR_ARG (exp, 1);
4035 /* There is no way here to determine the length of the string in
4036 the destination to which the SRC string is being appended, so
4037 just diagnose cases when the source string is longer than
4038 the destination object. */
4040 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4042 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4043 destsize);
4045 return NULL_RTX;
4048 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4049 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4050 try to get the result in TARGET, if convenient (and in mode MODE if that's
4051 convenient). */
4053 static rtx
4054 expand_builtin_strcpy (tree exp, rtx target)
4056 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4057 return NULL_RTX;
4059 tree dest = CALL_EXPR_ARG (exp, 0);
4060 tree src = CALL_EXPR_ARG (exp, 1);
4062 if (warn_stringop_overflow)
4064 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4065 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4066 src, destsize);
4069 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4071 /* Check to see if the argument was declared attribute nonstring
4072 and if so, issue a warning since at this point it's not known
4073 to be nul-terminated. */
4074 tree fndecl = get_callee_fndecl (exp);
4075 maybe_warn_nonstring_arg (fndecl, exp);
4076 return ret;
4079 return NULL_RTX;
4082 /* Helper function to do the actual work for expand_builtin_strcpy. The
4083 arguments to the builtin_strcpy call DEST and SRC are broken out
4084 so that this can also be called without constructing an actual CALL_EXPR.
4085 The other arguments and return value are the same as for
4086 expand_builtin_strcpy. */
4088 static rtx
4089 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4091 /* Detect strcpy calls with unterminated arrays. */
4092 if (tree nonstr = unterminated_array (src))
4094 /* NONSTR refers to the non-nul terminated constant array. */
4095 if (!TREE_NO_WARNING (exp))
4096 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4097 return NULL_RTX;
4100 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
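/* Illustrative example (editor's sketch): for

     const char a[3] = "abc";   // no room for the terminating nul
     strcpy (d, a);

   unterminated_array detects A, warn_string_no_nul diagnoses the
   call, and NULL_RTX is returned so a plain library call is emitted
   instead of an inline expansion. */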
4103 /* Expand a call EXP to the stpcpy builtin.
4104 Return NULL_RTX if we failed; the caller should emit a normal call,
4105 otherwise try to get the result in TARGET, if convenient (and in
4106 mode MODE if that's convenient). */
4108 static rtx
4109 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4111 tree dst, src;
4112 location_t loc = EXPR_LOCATION (exp);
4114 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4115 return NULL_RTX;
4117 dst = CALL_EXPR_ARG (exp, 0);
4118 src = CALL_EXPR_ARG (exp, 1);
4120 if (warn_stringop_overflow)
4122 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4123 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4124 src, destsize);
4127 /* If return value is ignored, transform stpcpy into strcpy. */
4128 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4130 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4131 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4132 return expand_expr (result, target, mode, EXPAND_NORMAL);
4134 else
4136 tree len, lenp1;
4137 rtx ret;
4139 /* Ensure we get an actual string whose length can be evaluated at
4140 compile-time, not an expression containing a string. This is
4141 because the latter will potentially produce pessimized code
4142 when used to produce the return value. */
4143 c_strlen_data lendata = { };
4144 if (!c_getstr (src, NULL)
4145 || !(len = c_strlen (src, 0, &lendata, 1)))
4146 return expand_movstr (dst, src, target,
4147 /*retmode=*/ RETURN_END_MINUS_ONE);
4149 if (lendata.decl && !TREE_NO_WARNING (exp))
4150 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4152 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4153 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4154 target, exp,
4155 /*retmode=*/ RETURN_END_MINUS_ONE);
4157 if (ret)
4158 return ret;
4160 if (TREE_CODE (len) == INTEGER_CST)
4162 rtx len_rtx = expand_normal (len);
4164 if (CONST_INT_P (len_rtx))
4166 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4168 if (ret)
4170 if (! target)
4172 if (mode != VOIDmode)
4173 target = gen_reg_rtx (mode);
4174 else
4175 target = gen_reg_rtx (GET_MODE (ret));
4177 if (GET_MODE (target) != GET_MODE (ret))
4178 ret = gen_lowpart (GET_MODE (target), ret);
4180 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4181 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4182 gcc_assert (ret);
4184 return target;
4189 return expand_movstr (dst, src, target,
4190 /*retmode=*/ RETURN_END_MINUS_ONE);
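/* For example (editor's note): stpcpy (d, "abc") takes the branch
   above with LEN = 3 and LENP1 = 4, expands as a mempcpy of 4 bytes
   with RETURN_END_MINUS_ONE, and so returns D + 3, a pointer to the
   copied terminating nul. */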
4194 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4195 arguments while being careful to avoid duplicate warnings (which could
4196 be issued if the expander were to expand the call, resulting in it
4197 being emitted in expand_call ()). */
4199 static rtx
4200 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4202 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4204 /* The call has been successfully expanded. Check for nonstring
4205 arguments and issue warnings as appropriate. */
4206 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4207 return ret;
4210 return NULL_RTX;
4213 /* Check a call EXP to the stpncpy built-in for validity.
4214 Return NULL_RTX on both success and failure. */
4216 static rtx
4217 expand_builtin_stpncpy (tree exp, rtx)
4219 if (!validate_arglist (exp,
4220 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4221 || !warn_stringop_overflow)
4222 return NULL_RTX;
4224 /* The source and destination of the call. */
4225 tree dest = CALL_EXPR_ARG (exp, 0);
4226 tree src = CALL_EXPR_ARG (exp, 1);
4228 /* The exact number of bytes to write (not the maximum). */
4229 tree len = CALL_EXPR_ARG (exp, 2);
4231 /* The size of the destination object. */
4232 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4234 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4236 return NULL_RTX;
4239 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4240 bytes from constant string DATA + OFFSET and return it as target
4241 constant. */
4244 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4245 scalar_int_mode mode)
4247 const char *str = (const char *) data;
4249 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4250 return const0_rtx;
4252 return c_readstr (str + offset, mode);
4255 /* Helper to check the sizes of sequences and the destination of calls
4256 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4257 success (no overflow or invalid sizes), false otherwise. */
4259 static bool
4260 check_strncat_sizes (tree exp, tree objsize)
4262 tree dest = CALL_EXPR_ARG (exp, 0);
4263 tree src = CALL_EXPR_ARG (exp, 1);
4264 tree maxread = CALL_EXPR_ARG (exp, 2);
4266 /* Try to determine the range of lengths that the source expression
4267 refers to. */
4268 c_strlen_data lendata = { };
4269 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4271 /* Try to verify that the destination is big enough for the shortest
4272 string. */
4274 if (!objsize && warn_stringop_overflow)
4276 /* If it hasn't been provided by __strncat_chk, try to determine
4277 the size of the destination object into which the source is
4278 being copied. */
4279 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4282 /* Add one for the terminating nul. */
4283 tree srclen = (lendata.minlen
4284 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4285 size_one_node)
4286 : NULL_TREE);
4288 /* The strncat function copies at most MAXREAD bytes and always appends
4289 the terminating nul so the specified upper bound should never be equal
4290 to (or greater than) the size of the destination. */
4291 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4292 && tree_int_cst_equal (objsize, maxread))
4294 location_t loc = tree_nonartificial_location (exp);
4295 loc = expansion_point_location_if_in_system_header (loc);
4297 warning_at (loc, OPT_Wstringop_overflow_,
4298 "%K%qD specified bound %E equals destination size",
4299 exp, get_callee_fndecl (exp), maxread);
4301 return false;
4304 if (!srclen
4305 || (maxread && tree_fits_uhwi_p (maxread)
4306 && tree_fits_uhwi_p (srclen)
4307 && tree_int_cst_lt (maxread, srclen)))
4308 srclen = maxread;
4310 /* The number of bytes to write is bounded by MAXREAD, but check_access
4311 will also check SRCLEN if MAXREAD's value isn't known. */
4312 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4313 objsize);
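/* Illustrative example (editor's sketch): the common misuse

     char d[8] = "ab";
     strncat (d, s, sizeof d);

   is diagnosed above because a bound equal to the destination size
   leaves no room for the nul strncat always appends; a safe bound
   here would be sizeof d - strlen (d) - 1. */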
4316 /* Similar to expand_builtin_strcat, do some very basic size validation
4317 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4318 the built-in expand to a call to the library function. */
4320 static rtx
4321 expand_builtin_strncat (tree exp, rtx)
4323 if (!validate_arglist (exp,
4324 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4325 || !warn_stringop_overflow)
4326 return NULL_RTX;
4328 tree dest = CALL_EXPR_ARG (exp, 0);
4329 tree src = CALL_EXPR_ARG (exp, 1);
4330 /* The upper bound on the number of bytes to write. */
4331 tree maxread = CALL_EXPR_ARG (exp, 2);
4332 /* The length of the source sequence. */
4333 tree slen = c_strlen (src, 1);
4335 /* Try to determine the range of lengths that the source expression
4336 refers to. Since the lengths are only used for warning and not
4337 for code generation, disable strict mode below. */
4338 tree maxlen = slen;
4339 if (!maxlen)
4341 c_strlen_data lendata = { };
4342 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4343 maxlen = lendata.maxbound;
4346 /* Try to verify that the destination is big enough for the shortest
4347 string. First try to determine the size of the destination object
4348 into which the source is being copied. */
4349 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4351 /* Add one for the terminating nul. */
4352 tree srclen = (maxlen
4353 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4354 size_one_node)
4355 : NULL_TREE);
4357 /* The strncat function copies at most MAXREAD bytes and always appends
4358 the terminating nul so the specified upper bound should never be equal
4359 to (or greater than) the size of the destination. */
4360 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4361 && tree_int_cst_equal (destsize, maxread))
4363 location_t loc = tree_nonartificial_location (exp);
4364 loc = expansion_point_location_if_in_system_header (loc);
4366 warning_at (loc, OPT_Wstringop_overflow_,
4367 "%K%qD specified bound %E equals destination size",
4368 exp, get_callee_fndecl (exp), maxread);
4370 return NULL_RTX;
4373 if (!srclen
4374 || (maxread && tree_fits_uhwi_p (maxread)
4375 && tree_fits_uhwi_p (srclen)
4376 && tree_int_cst_lt (maxread, srclen)))
4377 srclen = maxread;
4379 /* The number of bytes to write is SRCLEN. */
4380 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4382 return NULL_RTX;
4385 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4386 NULL_RTX if we failed; the caller should emit a normal call. */
4388 static rtx
4389 expand_builtin_strncpy (tree exp, rtx target)
4391 location_t loc = EXPR_LOCATION (exp);
4393 if (validate_arglist (exp,
4394 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4396 tree dest = CALL_EXPR_ARG (exp, 0);
4397 tree src = CALL_EXPR_ARG (exp, 1);
4398 /* The number of bytes to write (not the maximum). */
4399 tree len = CALL_EXPR_ARG (exp, 2);
4400 /* The length of the source sequence. */
4401 tree slen = c_strlen (src, 1);
4403 if (warn_stringop_overflow)
4405 tree destsize = compute_objsize (dest,
4406 warn_stringop_overflow - 1);
4408 /* The number of bytes to write is LEN but check_access will also
4409 check SLEN if LEN's value isn't known. */
4410 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4411 destsize);
4414 /* We must be passed a constant LEN and a SRC with a constant length. */
4415 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4416 return NULL_RTX;
4418 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4420 /* We're required to pad with trailing zeros if the requested
4421 len is greater than strlen(s2)+1. In that case try to
4422 use store_by_pieces; if it fails, punt. */
4423 if (tree_int_cst_lt (slen, len))
4425 unsigned int dest_align = get_pointer_alignment (dest);
4426 const char *p = c_getstr (src);
4427 rtx dest_mem;
4429 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4430 || !can_store_by_pieces (tree_to_uhwi (len),
4431 builtin_strncpy_read_str,
4432 CONST_CAST (char *, p),
4433 dest_align, false))
4434 return NULL_RTX;
4436 dest_mem = get_memory_rtx (dest, len);
4437 store_by_pieces (dest_mem, tree_to_uhwi (len),
4438 builtin_strncpy_read_str,
4439 CONST_CAST (char *, p), dest_align, false,
4440 RETURN_BEGIN);
4441 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4442 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4443 return dest_mem;
4446 return NULL_RTX;
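/* For example (editor's note): strncpy (d, "ab", 8) must store 'a',
   'b', and six trailing nuls. Since 8 > strlen ("ab") + 1, the code
   above uses store_by_pieces with builtin_strncpy_read_str, which
   supplies zeros for offsets past the end of the string constant. */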
4449 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4450 bytes from constant string DATA + OFFSET and return it as target
4451 constant. */
4454 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4455 scalar_int_mode mode)
4457 const char *c = (const char *) data;
4458 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4460 memset (p, *c, GET_MODE_SIZE (mode));
4462 return c_readstr (p, mode);
4465 /* Callback routine for store_by_pieces. Return the RTL of a register
4466 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4467 char value given in the RTL register data. For example, if mode is
4468 4 bytes wide, return the RTL for 0x01010101*data. */
4470 static rtx
4471 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4472 scalar_int_mode mode)
4474 rtx target, coeff;
4475 size_t size;
4476 char *p;
4478 size = GET_MODE_SIZE (mode);
4479 if (size == 1)
4480 return (rtx) data;
4482 p = XALLOCAVEC (char, size);
4483 memset (p, 1, size);
4484 coeff = c_readstr (p, mode);
4486 target = convert_to_mode (mode, (rtx) data, 1);
4487 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4488 return force_reg (mode, target);
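/* Worked example (editor's note): for a 4-byte MODE and DATA holding
   0xab, COEFF is 0x01010101 and the multiplication yields
   0xab * 0x01010101 = 0xabababab, four copies of the fill byte. */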
4491 /* Expand expression EXP, which is a call to the memset builtin. Return
4492 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4493 try to get the result in TARGET, if convenient (and in mode MODE if that's
4494 convenient). */
4496 static rtx
4497 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4499 if (!validate_arglist (exp,
4500 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4501 return NULL_RTX;
4503 tree dest = CALL_EXPR_ARG (exp, 0);
4504 tree val = CALL_EXPR_ARG (exp, 1);
4505 tree len = CALL_EXPR_ARG (exp, 2);
4507 check_memop_access (exp, dest, NULL_TREE, len);
4509 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4512 /* Helper function to do the actual work for expand_builtin_memset. The
4513 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4514 so that this can also be called without constructing an actual CALL_EXPR.
4515 The other arguments and return value are the same as for
4516 expand_builtin_memset. */
4518 static rtx
4519 expand_builtin_memset_args (tree dest, tree val, tree len,
4520 rtx target, machine_mode mode, tree orig_exp)
4522 tree fndecl, fn;
4523 enum built_in_function fcode;
4524 machine_mode val_mode;
4525 char c;
4526 unsigned int dest_align;
4527 rtx dest_mem, dest_addr, len_rtx;
4528 HOST_WIDE_INT expected_size = -1;
4529 unsigned int expected_align = 0;
4530 unsigned HOST_WIDE_INT min_size;
4531 unsigned HOST_WIDE_INT max_size;
4532 unsigned HOST_WIDE_INT probable_max_size;
4534 dest_align = get_pointer_alignment (dest);
4536 /* If DEST is not a pointer type, don't do this operation in-line. */
4537 if (dest_align == 0)
4538 return NULL_RTX;
4540 if (currently_expanding_gimple_stmt)
4541 stringop_block_profile (currently_expanding_gimple_stmt,
4542 &expected_align, &expected_size);
4544 if (expected_align < dest_align)
4545 expected_align = dest_align;
4547 /* If the LEN parameter is zero, return DEST. */
4548 if (integer_zerop (len))
4550 /* Evaluate and ignore VAL in case it has side-effects. */
4551 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4552 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4555 /* Stabilize the arguments in case we fail. */
4556 dest = builtin_save_expr (dest);
4557 val = builtin_save_expr (val);
4558 len = builtin_save_expr (len);
4560 len_rtx = expand_normal (len);
4561 determine_block_size (len, len_rtx, &min_size, &max_size,
4562 &probable_max_size);
4563 dest_mem = get_memory_rtx (dest, len);
4564 val_mode = TYPE_MODE (unsigned_char_type_node);
4566 if (TREE_CODE (val) != INTEGER_CST)
4568 rtx val_rtx;
4570 val_rtx = expand_normal (val);
4571 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4573 /* Assume that we can memset by pieces if we can store
4574 the coefficients by pieces (in the required modes).
4575 We can't pass builtin_memset_gen_str as that emits RTL. */
4576 c = 1;
4577 if (tree_fits_uhwi_p (len)
4578 && can_store_by_pieces (tree_to_uhwi (len),
4579 builtin_memset_read_str, &c, dest_align,
4580 true))
4582 val_rtx = force_reg (val_mode, val_rtx);
4583 store_by_pieces (dest_mem, tree_to_uhwi (len),
4584 builtin_memset_gen_str, val_rtx, dest_align,
4585 true, RETURN_BEGIN);
4587 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4588 dest_align, expected_align,
4589 expected_size, min_size, max_size,
4590 probable_max_size))
4591 goto do_libcall;
4593 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4594 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4595 return dest_mem;
4598 if (target_char_cast (val, &c))
4599 goto do_libcall;
4601 if (c)
4603 if (tree_fits_uhwi_p (len)
4604 && can_store_by_pieces (tree_to_uhwi (len),
4605 builtin_memset_read_str, &c, dest_align,
4606 true))
4607 store_by_pieces (dest_mem, tree_to_uhwi (len),
4608 builtin_memset_read_str, &c, dest_align, true,
4609 RETURN_BEGIN);
4610 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4611 gen_int_mode (c, val_mode),
4612 dest_align, expected_align,
4613 expected_size, min_size, max_size,
4614 probable_max_size))
4615 goto do_libcall;
4617 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4618 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4619 return dest_mem;
4622 set_mem_align (dest_mem, dest_align);
4623 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4624 CALL_EXPR_TAILCALL (orig_exp)
4625 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4626 expected_align, expected_size,
4627 min_size, max_size,
4628 probable_max_size);
4630 if (dest_addr == 0)
4632 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4633 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4636 return dest_addr;
4638 do_libcall:
4639 fndecl = get_callee_fndecl (orig_exp);
4640 fcode = DECL_FUNCTION_CODE (fndecl);
4641 if (fcode == BUILT_IN_MEMSET)
4642 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4643 dest, val, len);
4644 else if (fcode == BUILT_IN_BZERO)
4645 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4646 dest, len);
4647 else
4648 gcc_unreachable ();
4649 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4650 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4651 return expand_call (fn, target, target == const0_rtx);
4654 /* Expand expression EXP, which is a call to the bzero builtin. Return
4655 NULL_RTX if we failed; the caller should emit a normal call. */
4657 static rtx
4658 expand_builtin_bzero (tree exp)
4660 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4661 return NULL_RTX;
4663 tree dest = CALL_EXPR_ARG (exp, 0);
4664 tree size = CALL_EXPR_ARG (exp, 1);
4666 check_memop_access (exp, dest, NULL_TREE, size);
4668 /* New argument list transforming bzero(ptr x, int y) to
4669 memset(ptr x, int 0, size_t y). This is done this way
4670 so that if it isn't expanded inline, we fall back to
4671 calling bzero instead of memset. */
4673 location_t loc = EXPR_LOCATION (exp);
4675 return expand_builtin_memset_args (dest, integer_zero_node,
4676 fold_convert_loc (loc,
4677 size_type_node, size),
4678 const0_rtx, VOIDmode, exp);
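/* In other words (editor's note): bzero (p, n) is expanded exactly
   like memset (p, 0, n), except that EXP is kept as the original
   bzero call so the libcall fallback in expand_builtin_memset_args
   re-emits bzero rather than memset. */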
4681 /* Try to expand cmpstr operation ICODE with the given operands.
4682 Return the result rtx on success, otherwise return null. */
4684 static rtx
4685 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4686 HOST_WIDE_INT align)
4688 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4690 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4691 target = NULL_RTX;
4693 class expand_operand ops[4];
4694 create_output_operand (&ops[0], target, insn_mode);
4695 create_fixed_operand (&ops[1], arg1_rtx);
4696 create_fixed_operand (&ops[2], arg2_rtx);
4697 create_integer_operand (&ops[3], align);
4698 if (maybe_expand_insn (icode, 4, ops))
4699 return ops[0].value;
4700 return NULL_RTX;
4703 /* Expand expression EXP, which is a call to the memcmp built-in function.
4704 Return NULL_RTX if we failed and the caller should emit a normal call,
4705 otherwise try to get the result in TARGET, if convenient.
4706 RESULT_EQ is true if we can relax the returned value to be either zero
4707 or nonzero, without caring about the sign. */
4709 static rtx
4710 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4712 if (!validate_arglist (exp,
4713 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4714 return NULL_RTX;
4716 tree arg1 = CALL_EXPR_ARG (exp, 0);
4717 tree arg2 = CALL_EXPR_ARG (exp, 1);
4718 tree len = CALL_EXPR_ARG (exp, 2);
4719 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4720 bool no_overflow = true;
4722 /* Diagnose calls where the specified length exceeds the size of either
4723 object. */
4724 tree size = compute_objsize (arg1, 0);
4725 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4726 len, /*maxread=*/NULL_TREE, size,
4727 /*objsize=*/NULL_TREE);
4728 if (no_overflow)
4730 size = compute_objsize (arg2, 0);
4731 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4732 len, /*maxread=*/NULL_TREE, size,
4733 /*objsize=*/NULL_TREE);
4736 /* If the specified length exceeds the size of either object,
4737 call the function. */
4738 if (!no_overflow)
4739 return NULL_RTX;
4741 /* Due to the performance benefit, always inline the calls first
4742 when result_eq is false. */
4743 rtx result = NULL_RTX;
4745 if (!result_eq && fcode != BUILT_IN_BCMP)
4747 result = inline_expand_builtin_string_cmp (exp, target);
4748 if (result)
4749 return result;
4752 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4753 location_t loc = EXPR_LOCATION (exp);
4755 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4756 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4758 /* If we don't have POINTER_TYPE, call the function. */
4759 if (arg1_align == 0 || arg2_align == 0)
4760 return NULL_RTX;
4762 rtx arg1_rtx = get_memory_rtx (arg1, len);
4763 rtx arg2_rtx = get_memory_rtx (arg2, len);
4764 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4766 /* Set MEM_SIZE as appropriate. */
4767 if (CONST_INT_P (len_rtx))
4769 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4770 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4773 by_pieces_constfn constfn = NULL;
4775 const char *src_str = c_getstr (arg2);
4776 if (result_eq && src_str == NULL)
4778 src_str = c_getstr (arg1);
4779 if (src_str != NULL)
4780 std::swap (arg1_rtx, arg2_rtx);
4783 /* If SRC is a string constant and block move would be done
4784 by pieces, we can avoid loading the string from memory
4785 and only store the computed constants. */
4786 if (src_str
4787 && CONST_INT_P (len_rtx)
4788 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4789 constfn = builtin_memcpy_read_str;
4791 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4792 TREE_TYPE (len), target,
4793 result_eq, constfn,
4794 CONST_CAST (char *, src_str));
4796 if (result)
4798 /* Return the value in the proper mode for this function. */
4799 if (GET_MODE (result) == mode)
4800 return result;
4802 if (target != 0)
4804 convert_move (target, result, 0);
4805 return target;
4808 return convert_to_mode (mode, result, 0);
4811 return NULL_RTX;
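/* Illustrative note (editor's): in a use such as

     if (memcmp (a, b, n) == 0) ...

   only the zero/nonzero result matters, so the caller may pass
   RESULT_EQ = true, allowing emit_block_cmp_hints to use a cheaper
   equality-only comparison instead of an ordered three-way one. */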
4814 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4815 if we failed; the caller should emit a normal call, otherwise try to get
4816 the result in TARGET, if convenient. */
4818 static rtx
4819 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4821 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4822 return NULL_RTX;
4824 /* Due to the performance benefit, always inline the calls first. */
4825 rtx result = NULL_RTX;
4826 result = inline_expand_builtin_string_cmp (exp, target);
4827 if (result)
4828 return result;
4830 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4831 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4832 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4833 return NULL_RTX;
4835 tree arg1 = CALL_EXPR_ARG (exp, 0);
4836 tree arg2 = CALL_EXPR_ARG (exp, 1);
4838 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4839 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4841 /* If we don't have POINTER_TYPE, call the function. */
4842 if (arg1_align == 0 || arg2_align == 0)
4843 return NULL_RTX;
4845 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4846 arg1 = builtin_save_expr (arg1);
4847 arg2 = builtin_save_expr (arg2);
4849 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4850 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4852 /* Try to call cmpstrsi. */
4853 if (cmpstr_icode != CODE_FOR_nothing)
4854 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4855 MIN (arg1_align, arg2_align));
4857 /* Try to determine at least one length and call cmpstrnsi. */
4858 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4860 tree len;
4861 rtx arg3_rtx;
4863 tree len1 = c_strlen (arg1, 1);
4864 tree len2 = c_strlen (arg2, 1);
4866 if (len1)
4867 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4868 if (len2)
4869 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4871 /* If we don't have a constant length for the first, use the length
4872 of the second, if we know it. We don't require a constant for
4873 this case; some cost analysis could be done if both are available
4874 but neither is constant. For now, assume they're equally cheap,
4875 unless one has side effects. If both strings have constant lengths,
4876 use the smaller. */
4878 if (!len1)
4879 len = len2;
4880 else if (!len2)
4881 len = len1;
4882 else if (TREE_SIDE_EFFECTS (len1))
4883 len = len2;
4884 else if (TREE_SIDE_EFFECTS (len2))
4885 len = len1;
4886 else if (TREE_CODE (len1) != INTEGER_CST)
4887 len = len2;
4888 else if (TREE_CODE (len2) != INTEGER_CST)
4889 len = len1;
4890 else if (tree_int_cst_lt (len1, len2))
4891 len = len1;
4892 else
4893 len = len2;
4895 /* If both arguments have side effects, we cannot optimize. */
4896 if (len && !TREE_SIDE_EFFECTS (len))
4898 arg3_rtx = expand_normal (len);
4899 result = expand_cmpstrn_or_cmpmem
4900 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4901 arg3_rtx, MIN (arg1_align, arg2_align));
4905 tree fndecl = get_callee_fndecl (exp);
4906 if (result)
4908 /* Check to see if the argument was declared attribute nonstring
4909 and if so, issue a warning since at this point it's not known
4910 to be nul-terminated. */
4911 maybe_warn_nonstring_arg (fndecl, exp);
4913 /* Return the value in the proper mode for this function. */
4914 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4915 if (GET_MODE (result) == mode)
4916 return result;
4917 if (target == 0)
4918 return convert_to_mode (mode, result, 0);
4919 convert_move (target, result, 0);
4920 return target;
4923 /* Expand the library call ourselves using a stabilized argument
4924 list to avoid re-evaluating the function's arguments twice. */
4925 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4926 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4927 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4928 return expand_call (fn, target, target == const0_rtx);
4931 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4932 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4933 the result in TARGET, if convenient. */
4935 static rtx
4936 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4937 ATTRIBUTE_UNUSED machine_mode mode)
4939 if (!validate_arglist (exp,
4940 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4941 return NULL_RTX;
4943 /* Due to the performance benefit, always inline the calls first. */
4944 rtx result = NULL_RTX;
4945 result = inline_expand_builtin_string_cmp (exp, target);
4946 if (result)
4947 return result;
4949 /* If c_strlen can determine an expression for one of the string
4950 lengths, and it doesn't have side effects, then emit cmpstrnsi
4951 using length MIN(strlen(string)+1, arg3). */
4952 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4953 if (cmpstrn_icode == CODE_FOR_nothing)
4954 return NULL_RTX;
4956 tree len;
4958 tree arg1 = CALL_EXPR_ARG (exp, 0);
4959 tree arg2 = CALL_EXPR_ARG (exp, 1);
4960 tree arg3 = CALL_EXPR_ARG (exp, 2);
4962 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4963 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4965 tree len1 = c_strlen (arg1, 1);
4966 tree len2 = c_strlen (arg2, 1);
4968 location_t loc = EXPR_LOCATION (exp);
4970 if (len1)
4971 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4972 if (len2)
4973 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4975 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4977 /* If we don't have a constant length for the first, use the length
4978 of the second, if we know it. If neither string is constant length,
4979 use the given length argument. We don't require a constant for
4980 this case; some cost analysis could be done if both are available
4981 but neither is constant. For now, assume they're equally cheap,
4982 unless one has side effects. If both strings have constant lengths,
4983 use the smaller. */
4985 if (!len1 && !len2)
4986 len = len3;
4987 else if (!len1)
4988 len = len2;
4989 else if (!len2)
4990 len = len1;
4991 else if (TREE_SIDE_EFFECTS (len1))
4992 len = len2;
4993 else if (TREE_SIDE_EFFECTS (len2))
4994 len = len1;
4995 else if (TREE_CODE (len1) != INTEGER_CST)
4996 len = len2;
4997 else if (TREE_CODE (len2) != INTEGER_CST)
4998 len = len1;
4999 else if (tree_int_cst_lt (len1, len2))
5000 len = len1;
5001 else
5002 len = len2;
5004 /* If we are not using the given length, we must incorporate it here.
5005 The actual new length parameter will be MIN(len,arg3) in this case. */
5006 if (len != len3)
5008 len = fold_convert_loc (loc, sizetype, len);
5009 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5011 rtx arg1_rtx = get_memory_rtx (arg1, len);
5012 rtx arg2_rtx = get_memory_rtx (arg2, len);
5013 rtx arg3_rtx = expand_normal (len);
5014 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5015 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5016 MIN (arg1_align, arg2_align));
5018 tree fndecl = get_callee_fndecl (exp);
5019 if (result)
5021 /* Check to see if the argument was declared attribute nonstring
5022 and if so, issue a warning since at this point it's not known
5023 to be nul-terminated. */
5024 maybe_warn_nonstring_arg (fndecl, exp);
5026 /* Return the value in the proper mode for this function. */
5027 mode = TYPE_MODE (TREE_TYPE (exp));
5028 if (GET_MODE (result) == mode)
5029 return result;
5030 if (target == 0)
5031 return convert_to_mode (mode, result, 0);
5032 convert_move (target, result, 0);
5033 return target;
5036 /* Expand the library call ourselves using a stabilized argument
5037 list to avoid re-evaluating the function's arguments twice. */
5038 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5041 return expand_call (fn, target, target == const0_rtx);
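/* Worked example (editor's note): for strncmp (s, "hello", 32),
   LEN1 is unknown, LEN2 is strlen ("hello") + 1 = 6, and LEN3 is 32.
   The selection above picks LEN = 6; since that differs from the
   given bound, the comparison uses MIN (6, 32) = 6 bytes via the
   cmpstrnsi pattern. */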
5044 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5045 if that's convenient. */
5048 expand_builtin_saveregs (void)
5050 rtx val;
5051 rtx_insn *seq;
5053 /* Don't do __builtin_saveregs more than once in a function.
5054 Save the result of the first call and reuse it. */
5055 if (saveregs_value != 0)
5056 return saveregs_value;
5058 /* When this function is called, it means that registers must be
5059 saved on entry to this function. So we migrate the call to the
5060 first insn of this function. */
5062 start_sequence ();
5064 /* Do whatever the machine needs done in this case. */
5065 val = targetm.calls.expand_builtin_saveregs ();
5067 seq = get_insns ();
5068 end_sequence ();
5070 saveregs_value = val;
5072 /* Put the insns after the NOTE that starts the function. If this
5073 is inside a start_sequence, make the outer-level insn chain current, so
5074 the code is placed at the start of the function. */
5075 push_topmost_sequence ();
5076 emit_insn_after (seq, entry_of_function ());
5077 pop_topmost_sequence ();
5079 return val;
5082 /* Expand a call to __builtin_next_arg. */
5084 static rtx
5085 expand_builtin_next_arg (void)
5087 /* Checking arguments is already done in fold_builtin_next_arg
5088 that must be called before this function. */
5089 return expand_binop (ptr_mode, add_optab,
5090 crtl->args.internal_arg_pointer,
5091 crtl->args.arg_offset_rtx,
5092 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5095 /* Make it easier for the backends by protecting the valist argument
5096 from multiple evaluations. */
5098 static tree
5099 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5101 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5103 /* The current way of determining the type of valist is completely
5104 bogus. We should have the information on the va builtin instead. */
5105 if (!vatype)
5106 vatype = targetm.fn_abi_va_list (cfun->decl);
5108 if (TREE_CODE (vatype) == ARRAY_TYPE)
5110 if (TREE_SIDE_EFFECTS (valist))
5111 valist = save_expr (valist);
5113 /* For this case, the backends will be expecting a pointer to
5114 vatype, but it's possible we've actually been given an array
5115 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5116 So fix it. */
5117 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5119 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5120 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5123 else
5125 tree pt = build_pointer_type (vatype);
5127 if (! needs_lvalue)
5129 if (! TREE_SIDE_EFFECTS (valist))
5130 return valist;
5132 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5133 TREE_SIDE_EFFECTS (valist) = 1;
5136 if (TREE_SIDE_EFFECTS (valist))
5137 valist = save_expr (valist);
5138 valist = fold_build2_loc (loc, MEM_REF,
5139 vatype, valist, build_int_cst (pt, 0));
5142 return valist;
5145 /* The "standard" definition of va_list is void*. */
5147 tree
5148 std_build_builtin_va_list (void)
5150 return ptr_type_node;
5153 /* The "standard" abi va_list is va_list_type_node. */
5155 tree
5156 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5158 return va_list_type_node;
5161 /* The "standard" type of va_list is va_list_type_node. */
5163 tree
5164 std_canonical_va_list_type (tree type)
5166 tree wtype, htype;
5168 wtype = va_list_type_node;
5169 htype = type;
5171 if (TREE_CODE (wtype) == ARRAY_TYPE)
5173 /* If va_list is an array type, the argument may have decayed
5174 to a pointer type, e.g. by being passed to another function.
5175 In that case, unwrap both types so that we can compare the
5176 underlying records. */
5177 if (TREE_CODE (htype) == ARRAY_TYPE
5178 || POINTER_TYPE_P (htype))
5180 wtype = TREE_TYPE (wtype);
5181 htype = TREE_TYPE (htype);
5184 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5185 return va_list_type_node;
5187 return NULL_TREE;
5190 /* The "standard" implementation of va_start: just assign `nextarg' to
5191 the variable. */
5193 void
5194 std_expand_builtin_va_start (tree valist, rtx nextarg)
5196 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5197 convert_move (va_r, nextarg, 0);
5200 /* Expand EXP, a call to __builtin_va_start. */
5202 static rtx
5203 expand_builtin_va_start (tree exp)
5205 rtx nextarg;
5206 tree valist;
5207 location_t loc = EXPR_LOCATION (exp);
5209 if (call_expr_nargs (exp) < 2)
5211 error_at (loc, "too few arguments to function %<va_start%>");
5212 return const0_rtx;
5215 if (fold_builtin_next_arg (exp, true))
5216 return const0_rtx;
5218 nextarg = expand_builtin_next_arg ();
5219 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5221 if (targetm.expand_builtin_va_start)
5222 targetm.expand_builtin_va_start (valist, nextarg);
5223 else
5224 std_expand_builtin_va_start (valist, nextarg);
5226 return const0_rtx;
5229 /* Expand EXP, a call to __builtin_va_end. */
5231 static rtx
5232 expand_builtin_va_end (tree exp)
5234 tree valist = CALL_EXPR_ARG (exp, 0);
5236 /* Evaluate for side effects, if needed. I hate macros that don't
5237 do that. */
5238 if (TREE_SIDE_EFFECTS (valist))
5239 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5241 return const0_rtx;
5244 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5245 builtin rather than just as an assignment in stdarg.h because of the
5246 nastiness of array-type va_list types. */
5248 static rtx
5249 expand_builtin_va_copy (tree exp)
5251 tree dst, src, t;
5252 location_t loc = EXPR_LOCATION (exp);
5254 dst = CALL_EXPR_ARG (exp, 0);
5255 src = CALL_EXPR_ARG (exp, 1);
5257 dst = stabilize_va_list_loc (loc, dst, 1);
5258 src = stabilize_va_list_loc (loc, src, 0);
5260 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5262 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5264 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5265 TREE_SIDE_EFFECTS (t) = 1;
5266 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5268 else
5270 rtx dstb, srcb, size;
5272 /* Evaluate to pointers. */
5273 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5274 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5275 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5276 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5278 dstb = convert_memory_address (Pmode, dstb);
5279 srcb = convert_memory_address (Pmode, srcb);
5281 /* "Dereference" to BLKmode memories. */
5282 dstb = gen_rtx_MEM (BLKmode, dstb);
5283 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5284 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5285 srcb = gen_rtx_MEM (BLKmode, srcb);
5286 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5287 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5289 /* Copy. */
5290 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5293 return const0_rtx;
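/* Illustrative sketch, not part of GCC: the block-copy branch above is
   why va_copy is a builtin at all.  On targets such as x86-64, where
   va_list is a one-element array of a record, a plain assignment would
   copy only a decayed pointer, while the builtin copies the record:

     void
     forward_args (const char *fmt, ...)
     {
       __builtin_va_list ap, ap2;
       __builtin_va_start (ap, fmt);
       __builtin_va_copy (ap2, ap);     (block copy of the whole record)
       __builtin_va_end (ap2);
       __builtin_va_end (ap);
     }
*/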
5296 /* Expand a call to one of the builtin functions __builtin_frame_address or
5297 __builtin_return_address. */
5299 static rtx
5300 expand_builtin_frame_address (tree fndecl, tree exp)
5302 /* The argument must be a nonnegative integer constant.
5303 It counts the number of frames to scan up the stack.
5304 The value is either the frame pointer value or the return
5305 address saved in that frame. */
5306 if (call_expr_nargs (exp) == 0)
5307 /* Warning about missing arg was already issued. */
5308 return const0_rtx;
5309 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5311 error ("invalid argument to %qD", fndecl);
5312 return const0_rtx;
5314 else
5316 /* Number of frames to scan up the stack. */
5317 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5319 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5321 /* Some ports cannot access arbitrary stack frames. */
5322 if (tem == NULL)
5324 warning (0, "unsupported argument to %qD", fndecl);
5325 return const0_rtx;
5328 if (count)
5330 /* Warn since no effort is made to ensure that any frame
5331 beyond the current one exists or can be safely reached. */
5332 warning (OPT_Wframe_address, "calling %qD with "
5333 "a nonzero argument is unsafe", fndecl);
5336 /* For __builtin_frame_address, return what we've got. */
5337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5338 return tem;
5340 if (!REG_P (tem)
5341 && ! CONSTANT_P (tem))
5342 tem = copy_addr_to_reg (tem);
5343 return tem;
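/* Illustrative usage sketch, not part of GCC: both builtins take a
   constant count of frames to walk, and only a count of zero is
   guaranteed to work, which is what the OPT_Wframe_address warning
   above is about:

     void *fp = __builtin_frame_address (0);    (current frame; safe)
     void *ra = __builtin_return_address (0);   (own return address; safe)
     void *up = __builtin_frame_address (2);    (warns; the grandparent
                                                  frame may not exist)
*/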
5347 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5348 failed and the caller should emit a normal call. */
5350 static rtx
5351 expand_builtin_alloca (tree exp)
5353 rtx op0;
5354 rtx result;
5355 unsigned int align;
5356 tree fndecl = get_callee_fndecl (exp);
5357 HOST_WIDE_INT max_size;
5358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5359 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5360 bool valid_arglist
5361 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5362 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5363 VOID_TYPE)
5364 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5365 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5366 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5368 if (!valid_arglist)
5369 return NULL_RTX;
5371 if ((alloca_for_var
5372 && warn_vla_limit >= HOST_WIDE_INT_MAX
5373 && warn_alloc_size_limit < warn_vla_limit)
5374 || (!alloca_for_var
5375 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5376 && warn_alloc_size_limit < warn_alloca_limit
5379 /* -Walloca-larger-than and -Wvla-larger-than settings of
5380 less than HOST_WIDE_INT_MAX override the more general
5381 -Walloc-size-larger-than so unless either of the former
5382 options is smaller than the last one (which would imply
5383 that the call was already checked), check the alloca
5384 arguments for overflow. */
5385 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5386 int idx[] = { 0, -1 };
5387 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5390 /* Compute the argument. */
5391 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5393 /* Compute the alignment. */
5394 align = (fcode == BUILT_IN_ALLOCA
5395 ? BIGGEST_ALIGNMENT
5396 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5398 /* Compute the maximum size. */
5399 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5400 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5401 : -1);
5403 /* Allocate the desired space. If the allocation stems from the declaration
5404 of a variable-sized object, it cannot accumulate. */
5405 result
5406 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5407 result = convert_memory_address (ptr_mode, result);
5409 /* Dynamic allocations for variables are recorded during gimplification. */
5410 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5411 record_dynamic_alloc (exp);
5413 return result;
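/* Illustrative sketch, not part of GCC: the three argument shapes
   validated above correspond to these forms, the latter two being
   emitted mostly by the compiler itself (e.g. for VLAs):

     p = __builtin_alloca (n);
     p = __builtin_alloca_with_align (n, 64);             (alignment
                                                            in bits)
     p = __builtin_alloca_with_align_and_max (n, 64, 4096);

   The constants 64 and 4096 are example alignment and maximum-size
   values, not anything mandated by the interface.  */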
5416 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5417 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5418 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5419 in the comment to the handle_builtin_stack_restore function. */
5421 static rtx
5422 expand_asan_emit_allocas_unpoison (tree exp)
5424 tree arg0 = CALL_EXPR_ARG (exp, 0);
5425 tree arg1 = CALL_EXPR_ARG (exp, 1);
5426 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5427 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5428 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5429 stack_pointer_rtx, NULL_RTX, 0,
5430 OPTAB_LIB_WIDEN);
5431 off = convert_modes (ptr_mode, Pmode, off, 0);
5432 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5433 OPTAB_LIB_WIDEN);
5434 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5435 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5436 top, ptr_mode, bot, ptr_mode);
5437 return ret;
5440 /* Expand a call to bswap builtin in EXP.
5441 Return NULL_RTX if a normal call should be emitted rather than expanding the
5442 function in-line. If convenient, the result should be placed in TARGET.
5443 SUBTARGET may be used as the target for computing one of EXP's operands. */
5445 static rtx
5446 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5447 rtx subtarget)
5449 tree arg;
5450 rtx op0;
5452 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5453 return NULL_RTX;
5455 arg = CALL_EXPR_ARG (exp, 0);
5456 op0 = expand_expr (arg,
5457 subtarget && GET_MODE (subtarget) == target_mode
5458 ? subtarget : NULL_RTX,
5459 target_mode, EXPAND_NORMAL);
5460 if (GET_MODE (op0) != target_mode)
5461 op0 = convert_to_mode (target_mode, op0, 1);
5463 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5465 gcc_assert (target);
5467 return convert_to_mode (target_mode, target, 1);
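/* Worked example, illustrative only: in the 32-bit case the
   bswap_optab expansion above computes a full byte reversal,

     __builtin_bswap32 (0x12345678) == 0x78563412

   which many targets implement as a single instruction (BSWAP on x86,
   REV on ARM).  */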
5470 /* Expand a call to a unary builtin in EXP.
5471 Return NULL_RTX if a normal call should be emitted rather than expanding the
5472 function in-line. If convenient, the result should be placed in TARGET.
5473 SUBTARGET may be used as the target for computing one of EXP's operands. */
5475 static rtx
5476 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5477 rtx subtarget, optab op_optab)
5479 rtx op0;
5481 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5482 return NULL_RTX;
5484 /* Compute the argument. */
5485 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5486 (subtarget
5487 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5488 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5489 VOIDmode, EXPAND_NORMAL);
5490 /* Compute op, into TARGET if possible.
5491 Set TARGET to wherever the result comes back. */
5492 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5493 op_optab, op0, target, op_optab != clrsb_optab);
5494 gcc_assert (target);
5496 return convert_to_mode (target_mode, target, 0);
5499 /* Expand a call to __builtin_expect. We just return our argument
5500 as the builtin_expect semantics should already have been handled by
5501 the tree branch prediction pass. */
5503 static rtx
5504 expand_builtin_expect (tree exp, rtx target)
5506 tree arg;
5508 if (call_expr_nargs (exp) < 2)
5509 return const0_rtx;
5510 arg = CALL_EXPR_ARG (exp, 0);
5512 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5513 /* When guessing was done, the hints should be already stripped away. */
5514 gcc_assert (!flag_guess_branch_prob
5515 || optimize == 0 || seen_error ());
5516 return target;
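/* Illustrative usage sketch, not part of GCC: the hint was consumed by
   the tree branch prediction pass long before this point, so

     if (__builtin_expect (p == 0, 0))      (an "unlikely" annotation)
       handle_rare_error ();

   expands here exactly like a plain "if (p == 0)"; handle_rare_error
   stands for any cold-path call.  */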
5519 /* Expand a call to __builtin_expect_with_probability. We just return our
5520 argument as the builtin_expect semantics should already have been handled
5521 by the tree branch prediction pass. */
5523 static rtx
5524 expand_builtin_expect_with_probability (tree exp, rtx target)
5526 tree arg;
5528 if (call_expr_nargs (exp) < 3)
5529 return const0_rtx;
5530 arg = CALL_EXPR_ARG (exp, 0);
5532 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5533 /* When guessing was done, the hints should be already stripped away. */
5534 gcc_assert (!flag_guess_branch_prob
5535 || optimize == 0 || seen_error ());
5536 return target;
5540 /* Expand a call to __builtin_assume_aligned. We just return our first
5541 argument as the builtin_assume_aligned semantics should already have
5542 been handled by CCP. */
5544 static rtx
5545 expand_builtin_assume_aligned (tree exp, rtx target)
5547 if (call_expr_nargs (exp) < 2)
5548 return const0_rtx;
5549 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5550 EXPAND_NORMAL);
5551 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5552 && (call_expr_nargs (exp) < 3
5553 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5554 return target;
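/* Illustrative usage sketch, not part of GCC: the alignment promise
   was consumed by CCP earlier, so by expansion time

     double *q = __builtin_assume_aligned (buf, 64);

   is equivalent to "double *q = (double *) buf;"; only the first
   argument is expanded above.  */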
5557 void
5558 expand_builtin_trap (void)
5560 if (targetm.have_trap ())
5562 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5563 /* For trap insns when not accumulating outgoing args force
5564 REG_ARGS_SIZE note to prevent crossjumping of calls with
5565 different args sizes. */
5566 if (!ACCUMULATE_OUTGOING_ARGS)
5567 add_args_size_note (insn, stack_pointer_delta);
5569 else
5571 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5572 tree call_expr = build_call_expr (fn, 0);
5573 expand_call (call_expr, NULL_RTX, false);
5576 emit_barrier ();
5579 /* Expand a call to __builtin_unreachable. We do nothing except emit
5580 a barrier saying that control flow will not pass here.
5582 It is the responsibility of the program being compiled to ensure
5583 that control flow never reaches __builtin_unreachable. */
5584 static void
5585 expand_builtin_unreachable (void)
5587 emit_barrier ();
5590 /* Expand EXP, a call to fabs, fabsf or fabsl.
5591 Return NULL_RTX if a normal call should be emitted rather than expanding
5592 the function inline. If convenient, the result should be placed
5593 in TARGET. SUBTARGET may be used as the target for computing
5594 the operand. */
5596 static rtx
5597 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5599 machine_mode mode;
5600 tree arg;
5601 rtx op0;
5603 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5604 return NULL_RTX;
5606 arg = CALL_EXPR_ARG (exp, 0);
5607 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5608 mode = TYPE_MODE (TREE_TYPE (arg));
5609 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5610 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5613 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5614 Return NULL if a normal call should be emitted rather than expanding the
5615 function inline. If convenient, the result should be placed in TARGET.
5616 SUBTARGET may be used as the target for computing the operand. */
5618 static rtx
5619 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5621 rtx op0, op1;
5622 tree arg;
5624 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5625 return NULL_RTX;
5627 arg = CALL_EXPR_ARG (exp, 0);
5628 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5630 arg = CALL_EXPR_ARG (exp, 1);
5631 op1 = expand_normal (arg);
5633 return expand_copysign (op0, op1, target);
5636 /* Expand a call to __builtin___clear_cache. */
5638 static rtx
5639 expand_builtin___clear_cache (tree exp)
5641 if (!targetm.code_for_clear_cache)
5643 #ifdef CLEAR_INSN_CACHE
5644 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5645 does something. Just do the default expansion to a call to
5646 __clear_cache(). */
5647 return NULL_RTX;
5648 #else
5649 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5650 does nothing. There is no need to call it. Do nothing. */
5651 return const0_rtx;
5652 #endif /* CLEAR_INSN_CACHE */
5655 /* We have a "clear_cache" insn, and it will handle everything. */
5656 tree begin, end;
5657 rtx begin_rtx, end_rtx;
5659 /* We must not expand to a library call. If we did, any
5660 fallback library function in libgcc that might contain a call to
5661 __builtin___clear_cache() would recurse infinitely. */
5662 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5664 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5665 return const0_rtx;
5668 if (targetm.have_clear_cache ())
5670 class expand_operand ops[2];
5672 begin = CALL_EXPR_ARG (exp, 0);
5673 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5675 end = CALL_EXPR_ARG (exp, 1);
5676 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5678 create_address_operand (&ops[0], begin_rtx);
5679 create_address_operand (&ops[1], end_rtx);
5680 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5681 return const0_rtx;
5683 return const0_rtx;
5686 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5688 static rtx
5689 round_trampoline_addr (rtx tramp)
5691 rtx temp, addend, mask;
5693 /* If we don't need too much alignment, we'll have been guaranteed
5694 proper alignment by get_trampoline_type. */
5695 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5696 return tramp;
5698 /* Round address up to desired boundary. */
5699 temp = gen_reg_rtx (Pmode);
5700 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5701 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5703 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5704 temp, 0, OPTAB_LIB_WIDEN);
5705 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5706 temp, 0, OPTAB_LIB_WIDEN);
5708 return tramp;
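/* Worked example, illustrative only: for a TRAMPOLINE_ALIGNMENT of
   64 bits the two binops above compute, in byte terms,

     tramp = (tramp + (64 / 8 - 1)) & -(64 / 8)
           = (tramp + 7) & ~7

   the usual add-then-mask round-up-to-a-multiple idiom.  */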
5711 static rtx
5712 expand_builtin_init_trampoline (tree exp, bool onstack)
5714 tree t_tramp, t_func, t_chain;
5715 rtx m_tramp, r_tramp, r_chain, tmp;
5717 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5718 POINTER_TYPE, VOID_TYPE))
5719 return NULL_RTX;
5721 t_tramp = CALL_EXPR_ARG (exp, 0);
5722 t_func = CALL_EXPR_ARG (exp, 1);
5723 t_chain = CALL_EXPR_ARG (exp, 2);
5725 r_tramp = expand_normal (t_tramp);
5726 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5727 MEM_NOTRAP_P (m_tramp) = 1;
5729 /* If ONSTACK, the TRAMP argument should be the address of a field
5730 within the local function's FRAME decl. Either way, let's see if
5731 we can fill in the MEM_ATTRs for this memory. */
5732 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5733 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5735 /* Creator of a heap trampoline is responsible for making sure the
5736 address is aligned to at least STACK_BOUNDARY. Normally malloc
5737 will ensure this anyhow. */
5738 tmp = round_trampoline_addr (r_tramp);
5739 if (tmp != r_tramp)
5741 m_tramp = change_address (m_tramp, BLKmode, tmp);
5742 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5743 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5746 /* The FUNC argument should be the address of the nested function.
5747 Extract the actual function decl to pass to the hook. */
5748 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5749 t_func = TREE_OPERAND (t_func, 0);
5750 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5752 r_chain = expand_normal (t_chain);
5754 /* Generate insns to initialize the trampoline. */
5755 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5757 if (onstack)
5759 trampolines_created = 1;
5761 if (targetm.calls.custom_function_descriptors != 0)
5762 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5763 "trampoline generated for nested function %qD", t_func);
5766 return const0_rtx;
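/* Illustrative sketch, not part of GCC: on-stack trampolines are what
   make the GNU C nested-function extension work when such a function's
   address escapes:

     int outer (int k)
     {
       int inner (int i) { return i + k; }    (needs outer's frame)
       return call_through_pointer (inner);   (taking the address
                                                forces a trampoline)
     }

   The hypothetical call_through_pointer stands for any consumer of a
   plain function pointer; the OPT_Wtrampolines warning above flags
   exactly this construct.  */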
5769 static rtx
5770 expand_builtin_adjust_trampoline (tree exp)
5772 rtx tramp;
5774 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5775 return NULL_RTX;
5777 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5778 tramp = round_trampoline_addr (tramp);
5779 if (targetm.calls.trampoline_adjust_address)
5780 tramp = targetm.calls.trampoline_adjust_address (tramp);
5782 return tramp;
5785 /* Expand a call to the builtin descriptor initialization routine.
5786 A descriptor is made up of a couple of pointers to the static
5787 chain and the code entry in this order. */
5789 static rtx
5790 expand_builtin_init_descriptor (tree exp)
5792 tree t_descr, t_func, t_chain;
5793 rtx m_descr, r_descr, r_func, r_chain;
5795 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5796 VOID_TYPE))
5797 return NULL_RTX;
5799 t_descr = CALL_EXPR_ARG (exp, 0);
5800 t_func = CALL_EXPR_ARG (exp, 1);
5801 t_chain = CALL_EXPR_ARG (exp, 2);
5803 r_descr = expand_normal (t_descr);
5804 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5805 MEM_NOTRAP_P (m_descr) = 1;
5806 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5808 r_func = expand_normal (t_func);
5809 r_chain = expand_normal (t_chain);
5811 /* Generate insns to initialize the descriptor. */
5812 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5813 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5814 POINTER_SIZE / BITS_PER_UNIT), r_func);
5816 return const0_rtx;
5819 /* Expand a call to the builtin descriptor adjustment routine. */
5821 static rtx
5822 expand_builtin_adjust_descriptor (tree exp)
5824 rtx tramp;
5826 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5827 return NULL_RTX;
5829 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5831 /* Unalign the descriptor to allow runtime identification. */
5832 tramp = plus_constant (ptr_mode, tramp,
5833 targetm.calls.custom_function_descriptors);
5835 return force_operand (tramp, NULL_RTX);
5838 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5839 function. The function first checks whether the back end provides
5840 an insn to implement signbit for the respective mode. If not, it
5841 checks whether the floating point format of the value is such that
5842 the sign bit can be extracted. If that is not the case, error out.
5843 EXP is the expression that is a call to the builtin function; if
5844 convenient, the result should be placed in TARGET. */
5845 static rtx
5846 expand_builtin_signbit (tree exp, rtx target)
5848 const struct real_format *fmt;
5849 scalar_float_mode fmode;
5850 scalar_int_mode rmode, imode;
5851 tree arg;
5852 int word, bitpos;
5853 enum insn_code icode;
5854 rtx temp;
5855 location_t loc = EXPR_LOCATION (exp);
5857 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5858 return NULL_RTX;
5860 arg = CALL_EXPR_ARG (exp, 0);
5861 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5862 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5863 fmt = REAL_MODE_FORMAT (fmode);
5865 arg = builtin_save_expr (arg);
5867 /* Expand the argument yielding a RTX expression. */
5868 temp = expand_normal (arg);
5870 /* Check if the back end provides an insn that handles signbit for the
5871 argument's mode. */
5872 icode = optab_handler (signbit_optab, fmode);
5873 if (icode != CODE_FOR_nothing)
5875 rtx_insn *last = get_last_insn ();
5876 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5877 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5878 return target;
5879 delete_insns_since (last);
5882 /* For floating point formats without a sign bit, implement signbit
5883 as "ARG < 0.0". */
5884 bitpos = fmt->signbit_ro;
5885 if (bitpos < 0)
5887 /* But we can't do this if the format supports signed zero. */
5888 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5890 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5891 build_real (TREE_TYPE (arg), dconst0));
5892 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5895 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5897 imode = int_mode_for_mode (fmode).require ();
5898 temp = gen_lowpart (imode, temp);
5900 else
5902 imode = word_mode;
5903 /* Handle targets with different FP word orders. */
5904 if (FLOAT_WORDS_BIG_ENDIAN)
5905 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5906 else
5907 word = bitpos / BITS_PER_WORD;
5908 temp = operand_subword_force (temp, word, fmode);
5909 bitpos = bitpos % BITS_PER_WORD;
5912 /* Force the intermediate word_mode (or narrower) result into a
5913 register. This avoids attempting to create paradoxical SUBREGs
5914 of floating point modes below. */
5915 temp = force_reg (imode, temp);
5917 /* If the bitpos is within the "result mode" lowpart, the operation
5918 can be implemented with a single bitwise AND. Otherwise, we need
5919 a right shift and an AND. */
5921 if (bitpos < GET_MODE_BITSIZE (rmode))
5923 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5925 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5926 temp = gen_lowpart (rmode, temp);
5927 temp = expand_binop (rmode, and_optab, temp,
5928 immed_wide_int_const (mask, rmode),
5929 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5931 else
5933 /* Perform a logical right shift to place the signbit in the least
5934 significant bit, then truncate the result to the desired mode
5935 and mask just this bit. */
5936 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5937 temp = gen_lowpart (rmode, temp);
5938 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5939 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5942 return temp;
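/* Worked example, illustrative only: with 32-bit int as the result
   mode, IEEE float has bitpos 31 < 32, so the single-AND branch masks
   0x80000000 directly.  IEEE double has bitpos 63, so the other branch
   computes, conceptually,

     signbit (d) == (int) ((bits_of (d) >> 63) & 1)

   where bits_of stands for reinterpreting the value as an integer of
   the same size.  */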
5945 /* Expand fork or exec calls. TARGET is the desired target of the
5946 call. EXP is the call. FN is the
5947 decl of the actual function. IGNORE is nonzero if the
5948 value is to be ignored. */
5950 static rtx
5951 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5953 tree id, decl;
5954 tree call;
5956 /* If we are not profiling, just call the function. */
5957 if (!profile_arc_flag)
5958 return NULL_RTX;
5960 /* Otherwise call the wrapper. This should be equivalent for the rest of
5961 the compiler, so the code does not diverge, and the wrapper may run the
5962 code necessary for keeping the profiling sane. */
5964 switch (DECL_FUNCTION_CODE (fn))
5966 case BUILT_IN_FORK:
5967 id = get_identifier ("__gcov_fork");
5968 break;
5970 case BUILT_IN_EXECL:
5971 id = get_identifier ("__gcov_execl");
5972 break;
5974 case BUILT_IN_EXECV:
5975 id = get_identifier ("__gcov_execv");
5976 break;
5978 case BUILT_IN_EXECLP:
5979 id = get_identifier ("__gcov_execlp");
5980 break;
5982 case BUILT_IN_EXECLE:
5983 id = get_identifier ("__gcov_execle");
5984 break;
5986 case BUILT_IN_EXECVP:
5987 id = get_identifier ("__gcov_execvp");
5988 break;
5990 case BUILT_IN_EXECVE:
5991 id = get_identifier ("__gcov_execve");
5992 break;
5994 default:
5995 gcc_unreachable ();
5998 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5999 FUNCTION_DECL, id, TREE_TYPE (fn));
6000 DECL_EXTERNAL (decl) = 1;
6001 TREE_PUBLIC (decl) = 1;
6002 DECL_ARTIFICIAL (decl) = 1;
6003 TREE_NOTHROW (decl) = 1;
6004 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6005 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6006 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6007 return expand_call (call, target, ignore);
6012 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6013 the pointer in these functions is void*, the tree optimizers may remove
6014 casts. The mode computed in expand_builtin isn't reliable either, due
6015 to __sync_bool_compare_and_swap.
6017 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6018 group of builtins. This gives us log2 of the mode size. */
6020 static inline machine_mode
6021 get_builtin_sync_mode (int fcode_diff)
6023 /* The size is not negotiable, so ask not to get BLKmode in return
6024 if the target indicates that a smaller size would be better. */
6025 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
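/* Worked example, illustrative only: the _1/_2/_4/_8/_16 variants of
   each sync builtin occupy consecutive enum values, so FCODE_DIFF is
   log2 of the access size in bytes.  For __sync_fetch_and_add_4,
   fcode - base is 2 and the result is the integer mode of
   8 << 2 = 32 bits (SImode on most targets).  */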
6028 /* Expand the memory expression LOC and return the appropriate memory operand
6029 for the builtin_sync operations. */
6031 static rtx
6032 get_builtin_sync_mem (tree loc, machine_mode mode)
6034 rtx addr, mem;
6035 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6036 ? TREE_TYPE (TREE_TYPE (loc))
6037 : TREE_TYPE (loc));
6038 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6040 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6041 addr = convert_memory_address (addr_mode, addr);
6043 /* Note that we explicitly do not want any alias information for this
6044 memory, so that we kill all other live memories. Otherwise we don't
6045 satisfy the full barrier semantics of the intrinsic. */
6046 mem = gen_rtx_MEM (mode, addr);
6048 set_mem_addr_space (mem, addr_space);
6050 mem = validize_mem (mem);
6052 /* The alignment needs to be at least that of the mode. */
6053 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6054 get_pointer_alignment (loc)));
6055 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6056 MEM_VOLATILE_P (mem) = 1;
6058 return mem;
6061 /* Make sure an argument is in the right mode.
6062 EXP is the tree argument.
6063 MODE is the mode it should be in. */
6065 static rtx
6066 expand_expr_force_mode (tree exp, machine_mode mode)
6068 rtx val;
6069 machine_mode old_mode;
6071 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6072 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6073 of CONST_INTs, where we know the old_mode only from the call argument. */
6075 old_mode = GET_MODE (val);
6076 if (old_mode == VOIDmode)
6077 old_mode = TYPE_MODE (TREE_TYPE (exp));
6078 val = convert_modes (mode, old_mode, val, 1);
6079 return val;
6083 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6084 EXP is the CALL_EXPR. CODE is the rtx code
6085 that corresponds to the arithmetic or logical operation from the name;
6086 an exception here is that NOT actually means NAND. TARGET is an optional
6087 place for us to store the results; AFTER is true if this is the
6088 fetch_and_xxx form. */
6090 static rtx
6091 expand_builtin_sync_operation (machine_mode mode, tree exp,
6092 enum rtx_code code, bool after,
6093 rtx target)
6095 rtx val, mem;
6096 location_t loc = EXPR_LOCATION (exp);
6098 if (code == NOT && warn_sync_nand)
6100 tree fndecl = get_callee_fndecl (exp);
6101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6103 static bool warned_f_a_n, warned_n_a_f;
6105 switch (fcode)
6107 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6108 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6109 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6110 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6111 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6112 if (warned_f_a_n)
6113 break;
6115 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6116 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6117 warned_f_a_n = true;
6118 break;
6120 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6121 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6122 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6123 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6124 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6125 if (warned_n_a_f)
6126 break;
6128 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6129 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6130 warned_n_a_f = true;
6131 break;
6133 default:
6134 gcc_unreachable ();
6138 /* Expand the operands. */
6139 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6142 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6143 after);
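/* Illustrative sketch, not part of GCC: AFTER selects between the two
   result conventions of the legacy builtins,

     old = __sync_fetch_and_add (&x, n);    (AFTER false: old value)
     upd = __sync_add_and_fetch (&x, n);    (AFTER true: new value)

   and, per the warning above, NOT encodes NAND, which since GCC 4.4
   stores ~(x & n).  */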
6146 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6147 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6148 true if this is the boolean form. TARGET is a place for us to store the
6149 results; this is NOT optional if IS_BOOL is true. */
6151 static rtx
6152 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6153 bool is_bool, rtx target)
6155 rtx old_val, new_val, mem;
6156 rtx *pbool, *poval;
6158 /* Expand the operands. */
6159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6161 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6163 pbool = poval = NULL;
6164 if (target != const0_rtx)
6166 if (is_bool)
6167 pbool = &target;
6168 else
6169 poval = &target;
6171 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6172 false, MEMMODEL_SYNC_SEQ_CST,
6173 MEMMODEL_SYNC_SEQ_CST))
6174 return NULL_RTX;
6176 return target;
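/* Illustrative sketch, not part of GCC: the two legacy forms share
   this expansion and differ only in which result is kept:

     old = __sync_val_compare_and_swap (&x, expected, desired);
     ok  = __sync_bool_compare_and_swap (&x, expected, desired);

   Both atomically perform "if (x == expected) x = desired;" with full
   barrier semantics, returning the prior value or a success flag.  */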
6179 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6180 general form is actually an atomic exchange, and some targets only
6181 support a reduced form with the second argument being a constant 1.
6182 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6183 the results. */
6185 static rtx
6186 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6187 rtx target)
6189 rtx val, mem;
6191 /* Expand the operands. */
6192 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6193 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6195 return expand_sync_lock_test_and_set (target, mem, val);
6198 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6200 static void
6201 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6203 rtx mem;
6205 /* Expand the operands. */
6206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6208 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
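/* Illustrative sketch, not part of GCC: despite its name, the general
   form of test-and-set is an atomic exchange, and the pair above forms
   the classic spin lock:

     while (__sync_lock_test_and_set (&lock, 1))   (acquire barrier)
       ;
     ...critical section...
     __sync_lock_release (&lock);                  (store 0, release)

   Targets lacking a full exchange may only accept the constant 1 as
   the stored value, as noted above.  */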
6211 /* Given an integer representing an ``enum memmodel'', verify its
6212 correctness and return the memory model enum. */
6214 static enum memmodel
6215 get_memmodel (tree exp)
6217 rtx op;
6218 unsigned HOST_WIDE_INT val;
6219 location_t loc
6220 = expansion_point_location_if_in_system_header (input_location);
6222 /* If the parameter is not a constant, it's a run time value so we'll just
6223 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6224 if (TREE_CODE (exp) != INTEGER_CST)
6225 return MEMMODEL_SEQ_CST;
6227 op = expand_normal (exp);
6229 val = INTVAL (op);
6230 if (targetm.memmodel_check)
6231 val = targetm.memmodel_check (val);
6232 else if (val & ~MEMMODEL_MASK)
6234 warning_at (loc, OPT_Winvalid_memory_model,
6235 "unknown architecture specifier in memory model to builtin");
6236 return MEMMODEL_SEQ_CST;
6239 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6240 if (memmodel_base (val) >= MEMMODEL_LAST)
6242 warning_at (loc, OPT_Winvalid_memory_model,
6243 "invalid memory model argument to builtin");
6244 return MEMMODEL_SEQ_CST;
6247 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6248 be conservative and promote consume to acquire. */
6249 if (val == MEMMODEL_CONSUME)
6250 val = MEMMODEL_ACQUIRE;
6252 return (enum memmodel) val;
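/* Illustrative sketch, not part of GCC: the checks above accept the
   standard __ATOMIC_* constants and degrade gracefully otherwise:

     __atomic_load_n (&x, __ATOMIC_ACQUIRE);    (used as given)
     __atomic_load_n (&x, __ATOMIC_CONSUME);    (promoted to acquire,
                                                  the PR 59448 workaround)
     __atomic_load_n (&x, model_var);           (non-constant: treated
                                                  as seq-cst)
*/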
6255 /* Expand the __atomic_exchange intrinsic:
6256 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6257 EXP is the CALL_EXPR.
6258 TARGET is an optional place for us to store the results. */
6260 static rtx
6261 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6263 rtx val, mem;
6264 enum memmodel model;
6266 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6268 if (!flag_inline_atomics)
6269 return NULL_RTX;
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6275 return expand_atomic_exchange (target, mem, val, model);
6278 /* Expand the __atomic_compare_exchange intrinsic:
6279 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6280 TYPE desired, BOOL weak,
6281 enum memmodel success,
6282 enum memmodel failure)
6283 EXP is the CALL_EXPR.
6284 TARGET is an optional place for us to store the results. */
6286 static rtx
6287 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6288 rtx target)
6290 rtx expect, desired, mem, oldval;
6291 rtx_code_label *label;
6292 enum memmodel success, failure;
6293 tree weak;
6294 bool is_weak;
6295 location_t loc
6296 = expansion_point_location_if_in_system_header (input_location);
6298 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6299 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6301 if (failure > success)
6303 warning_at (loc, OPT_Winvalid_memory_model,
6304 "failure memory model cannot be stronger than success "
6305 "memory model for %<__atomic_compare_exchange%>");
6306 success = MEMMODEL_SEQ_CST;
6309 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6311 warning_at (loc, OPT_Winvalid_memory_model,
6312 "invalid failure memory model for "
6313 "%<__atomic_compare_exchange%>");
6314 failure = MEMMODEL_SEQ_CST;
6315 success = MEMMODEL_SEQ_CST;
6319 if (!flag_inline_atomics)
6320 return NULL_RTX;
6322 /* Expand the operands. */
6323 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6325 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6326 expect = convert_memory_address (Pmode, expect);
6327 expect = gen_rtx_MEM (mode, expect);
6328 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6330 weak = CALL_EXPR_ARG (exp, 3);
6331 is_weak = false;
6332 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6333 is_weak = true;
6335 if (target == const0_rtx)
6336 target = NULL;
6338 /* Lest the rtl backend create a race condition with an improper store
6339 to memory, always create a new pseudo for OLDVAL. */
6340 oldval = NULL;
6342 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6343 is_weak, success, failure))
6344 return NULL_RTX;
6346 /* Conditionally store back to EXPECT, lest we create a race condition
6347 with an improper store to memory. */
6348 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6349 the normal case where EXPECT is totally private, i.e. a register. At
6350 which point the store can be unconditional. */
6351 label = gen_label_rtx ();
6352 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6353 GET_MODE (target), 1, label);
6354 emit_move_insn (expect, oldval);
6355 emit_label (label);
6357 return target;
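/* Illustrative usage sketch, not part of GCC: a typical CAS loop this
   expansion serves; on failure the conditional store emitted above
   writes the observed value back through the second argument:

     long cur = __atomic_load_n (&x, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&x, &cur, cur + 1, 0,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;                                  (cur reloaded automatically)
*/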
6360 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6361 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6362 call. The weak parameter must be dropped to match the expected parameter
6363 list and the expected argument changed from value to pointer to memory
6364 slot. */
6366 static void
6367 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6369 unsigned int z;
6370 vec<tree, va_gc> *vec;
6372 vec_alloc (vec, 5);
6373 vec->quick_push (gimple_call_arg (call, 0));
6374 tree expected = gimple_call_arg (call, 1);
6375 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6376 TREE_TYPE (expected));
6377 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6378 if (expd != x)
6379 emit_move_insn (x, expd);
6380 tree v = make_tree (TREE_TYPE (expected), x);
6381 vec->quick_push (build1 (ADDR_EXPR,
6382 build_pointer_type (TREE_TYPE (expected)), v));
6383 vec->quick_push (gimple_call_arg (call, 2));
6384 /* Skip the boolean weak parameter. */
6385 for (z = 4; z < 6; z++)
6386 vec->quick_push (gimple_call_arg (call, z));
6387 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6388 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6389 gcc_assert (bytes_log2 < 5);
6390 built_in_function fncode
6391 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6392 + bytes_log2);
6393 tree fndecl = builtin_decl_explicit (fncode);
6394 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6395 fndecl);
6396 tree exp = build_call_vec (boolean_type_node, fn, vec);
6397 tree lhs = gimple_call_lhs (call);
6398 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6399 if (lhs)
6401 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6402 if (GET_MODE (boolret) != mode)
6403 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6404 x = force_reg (mode, x);
6405 write_complex_part (target, boolret, true);
6406 write_complex_part (target, x, false);
6410 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6412 void
6413 expand_ifn_atomic_compare_exchange (gcall *call)
6415 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6416 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6417 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6418 rtx expect, desired, mem, oldval, boolret;
6419 enum memmodel success, failure;
6420 tree lhs;
6421 bool is_weak;
6422 location_t loc
6423 = expansion_point_location_if_in_system_header (gimple_location (call));
6425 success = get_memmodel (gimple_call_arg (call, 4));
6426 failure = get_memmodel (gimple_call_arg (call, 5));
6428 if (failure > success)
6430 warning_at (loc, OPT_Winvalid_memory_model,
6431 "failure memory model cannot be stronger than success "
6432 "memory model for %<__atomic_compare_exchange%>");
6433 success = MEMMODEL_SEQ_CST;
6436 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6438 warning_at (loc, OPT_Winvalid_memory_model,
6439 "invalid failure memory model for "
6440 "%<__atomic_compare_exchange%>");
6441 failure = MEMMODEL_SEQ_CST;
6442 success = MEMMODEL_SEQ_CST;
6445 if (!flag_inline_atomics)
6447 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6448 return;
6451 /* Expand the operands. */
6452 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6454 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6455 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6457 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6459 boolret = NULL;
6460 oldval = NULL;
6462 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6463 is_weak, success, failure))
6465 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6466 return;
6469 lhs = gimple_call_lhs (call);
6470 if (lhs)
6472 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6473 if (GET_MODE (boolret) != mode)
6474 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6475 write_complex_part (target, boolret, true);
6476 write_complex_part (target, oldval, false);
6480 /* Expand the __atomic_load intrinsic:
6481 TYPE __atomic_load (TYPE *object, enum memmodel)
6482 EXP is the CALL_EXPR.
6483 TARGET is an optional place for us to store the results. */
6485 static rtx
6486 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6488 rtx mem;
6489 enum memmodel model;
6491 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6492 if (is_mm_release (model) || is_mm_acq_rel (model))
6494 location_t loc
6495 = expansion_point_location_if_in_system_header (input_location);
6496 warning_at (loc, OPT_Winvalid_memory_model,
6497 "invalid memory model for %<__atomic_load%>");
6498 model = MEMMODEL_SEQ_CST;
6501 if (!flag_inline_atomics)
6502 return NULL_RTX;
6504 /* Expand the operand. */
6505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6507 return expand_atomic_load (target, mem, model);
6511 /* Expand the __atomic_store intrinsic:
6512 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6513 EXP is the CALL_EXPR.
6514 TARGET is an optional place for us to store the results. */
6516 static rtx
6517 expand_builtin_atomic_store (machine_mode mode, tree exp)
6519 rtx mem, val;
6520 enum memmodel model;
6522 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6523 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6524 || is_mm_release (model)))
6526 location_t loc
6527 = expansion_point_location_if_in_system_header (input_location);
6528 warning_at (loc, OPT_Winvalid_memory_model,
6529 "invalid memory model for %<__atomic_store%>");
6530 model = MEMMODEL_SEQ_CST;
6533 if (!flag_inline_atomics)
6534 return NULL_RTX;
6536 /* Expand the operands. */
6537 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6538 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6540 return expand_atomic_store (mem, val, model, false);
6543 /* Expand the __atomic_fetch_XXX intrinsic:
6544 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6545 EXP is the CALL_EXPR.
6546 TARGET is an optional place for us to store the results.
6547 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6548 FETCH_AFTER is true if returning the result of the operation.
6549 FETCH_AFTER is false if returning the value before the operation.
6550 IGNORE is true if the result is not used.
6551 EXT_CALL is the correct builtin for an external call if this cannot be
6552 resolved to an instruction sequence. */
6554 static rtx
6555 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6556 enum rtx_code code, bool fetch_after,
6557 bool ignore, enum built_in_function ext_call)
6559 rtx val, mem, ret;
6560 enum memmodel model;
6561 tree fndecl;
6562 tree addr;
6564 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6566 /* Expand the operands. */
6567 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6568 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6570 /* Only try generating instructions if inlining is turned on. */
6571 if (flag_inline_atomics)
6573 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6574 if (ret)
6575 return ret;
6578 /* Return if a different routine isn't needed for the library call. */
6579 if (ext_call == BUILT_IN_NONE)
6580 return NULL_RTX;
6582 /* Change the call to the specified function. */
6583 fndecl = get_callee_fndecl (exp);
6584 addr = CALL_EXPR_FN (exp);
6585 STRIP_NOPS (addr);
6587 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6588 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6590 /* If we will emit code after the call, the call cannot be a tail call.
6591 If it is emitted as a tail call, a barrier is emitted after it, and
6592 then all trailing code is removed. */
6593 if (!ignore)
6594 CALL_EXPR_TAILCALL (exp) = 0;
6596 /* Expand the call here so we can emit trailing code. */
6597 ret = expand_call (exp, target, ignore);
6599 /* Replace the original function just in case it matters. */
6600 TREE_OPERAND (addr, 0) = fndecl;
6602 /* Then issue the arithmetic correction to return the right result. */
6603 if (!ignore)
6605 if (code == NOT)
6607 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6608 OPTAB_LIB_WIDEN);
6609 ret = expand_simple_unop (mode, NOT, ret, target, true);
6611 else
6612 ret = expand_simple_binop (mode, code, ret, val, target, true,
6613 OPTAB_LIB_WIDEN);
6615 return ret;
6618 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6620 void
6621 expand_ifn_atomic_bit_test_and (gcall *call)
6623 tree ptr = gimple_call_arg (call, 0);
6624 tree bit = gimple_call_arg (call, 1);
6625 tree flag = gimple_call_arg (call, 2);
6626 tree lhs = gimple_call_lhs (call);
6627 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6628 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6629 enum rtx_code code;
6630 optab optab;
6631 class expand_operand ops[5];
6633 gcc_assert (flag_inline_atomics);
6635 if (gimple_call_num_args (call) == 4)
6636 model = get_memmodel (gimple_call_arg (call, 3));
6638 rtx mem = get_builtin_sync_mem (ptr, mode);
6639 rtx val = expand_expr_force_mode (bit, mode);
6641 switch (gimple_call_internal_fn (call))
6643 case IFN_ATOMIC_BIT_TEST_AND_SET:
6644 code = IOR;
6645 optab = atomic_bit_test_and_set_optab;
6646 break;
6647 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6648 code = XOR;
6649 optab = atomic_bit_test_and_complement_optab;
6650 break;
6651 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6652 code = AND;
6653 optab = atomic_bit_test_and_reset_optab;
6654 break;
6655 default:
6656 gcc_unreachable ();
6659 if (lhs == NULL_TREE)
6661 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6662 val, NULL_RTX, true, OPTAB_DIRECT);
6663 if (code == AND)
6664 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6665 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6666 return;
6669 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6670 enum insn_code icode = direct_optab_handler (optab, mode);
6671 gcc_assert (icode != CODE_FOR_nothing);
6672 create_output_operand (&ops[0], target, mode);
6673 create_fixed_operand (&ops[1], mem);
6674 create_convert_operand_to (&ops[2], val, mode, true);
6675 create_integer_operand (&ops[3], model);
6676 create_integer_operand (&ops[4], integer_onep (flag));
6677 if (maybe_expand_insn (icode, 5, ops))
6678 return;
6680 rtx bitval = val;
6681 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6682 val, NULL_RTX, true, OPTAB_DIRECT);
6683 rtx maskval = val;
6684 if (code == AND)
6685 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6686 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6687 code, model, false);
6688 if (integer_onep (flag))
6690 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6691 NULL_RTX, true, OPTAB_DIRECT);
6692 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6693 true, OPTAB_DIRECT);
6695 else
6696 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6697 OPTAB_DIRECT);
6698 if (result != target)
6699 emit_move_insn (target, result);
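/* Illustrative sketch, not part of GCC: these internal functions are
   created when earlier passes recognize idioms such as

     unsigned old = __atomic_fetch_or (&word, 1u << bit,
                                       __ATOMIC_SEQ_CST);
     int was_set = (old >> bit) & 1;

   (IFN_ATOMIC_BIT_TEST_AND_SET here), letting targets with a direct
   atomic bit-test instruction, such as x86 LOCK BTS, avoid the full
   fetch-and-or plus shift and mask sequence emitted as the fallback
   above.  */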
6702 /* Expand an atomic clear operation.
6703 void _atomic_clear (BOOL *obj, enum memmodel)
6704 EXP is the call expression. */
6706 static rtx
6707 expand_builtin_atomic_clear (tree exp)
6709 machine_mode mode;
6710 rtx mem, ret;
6711 enum memmodel model;
6713 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6714 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6715 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6717 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6719 location_t loc
6720 = expansion_point_location_if_in_system_header (input_location);
6721 warning_at (loc, OPT_Winvalid_memory_model,
6722 "invalid memory model for %<__atomic_store%>");
6723 model = MEMMODEL_SEQ_CST;
6726 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6727 Failing that, a store is issued by __atomic_store. The only way this can
6728 fail is if the bool type is larger than a word size. Unlikely, but
6729 handle it anyway for completeness. Assume a single threaded model since
6730 there is no atomic support in this case, and no barriers are required. */
6731 ret = expand_atomic_store (mem, const0_rtx, model, true);
6732 if (!ret)
6733 emit_move_insn (mem, const0_rtx);
6734 return const0_rtx;
6737 /* Expand an atomic test_and_set operation.
6738 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6739 EXP is the call expression. */
6741 static rtx
6742 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6744 rtx mem;
6745 enum memmodel model;
6746 machine_mode mode;
6748 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6749 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6750 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6752 return expand_atomic_test_and_set (target, mem, model);
6756 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6757 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6759 static tree
6760 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6762 int size;
6763 machine_mode mode;
6764 unsigned int mode_align, type_align;
6766 if (TREE_CODE (arg0) != INTEGER_CST)
6767 return NULL_TREE;
6769 /* We need a corresponding integer mode for the access to be lock-free. */
6770 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6771 if (!int_mode_for_size (size, 0).exists (&mode))
6772 return boolean_false_node;
6774 mode_align = GET_MODE_ALIGNMENT (mode);
6776 if (TREE_CODE (arg1) == INTEGER_CST)
6778 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6780 /* Either this argument is null, or it's a fake pointer encoding
6781 the alignment of the object. */
6782 val = least_bit_hwi (val);
6783 val *= BITS_PER_UNIT;
6785 if (val == 0 || mode_align < val)
6786 type_align = mode_align;
6787 else
6788 type_align = val;
6790 else
6792 tree ttype = TREE_TYPE (arg1);
6794 /* This function is usually invoked and folded immediately by the front
6795 end before anything else has a chance to look at it. The pointer
6796 parameter at this point is usually cast to a void *, so check for that
6797 and look past the cast. */
6798 if (CONVERT_EXPR_P (arg1)
6799 && POINTER_TYPE_P (ttype)
6800 && VOID_TYPE_P (TREE_TYPE (ttype))
6801 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6802 arg1 = TREE_OPERAND (arg1, 0);
6804 ttype = TREE_TYPE (arg1);
6805 gcc_assert (POINTER_TYPE_P (ttype));
6807 /* Get the underlying type of the object. */
6808 ttype = TREE_TYPE (ttype);
6809 type_align = TYPE_ALIGN (ttype);
6812 /* If the object has smaller alignment, the lock free routines cannot
6813 be used. */
6814 if (type_align < mode_align)
6815 return boolean_false_node;
6817 /* Check if a compare_and_swap pattern exists for the mode which represents
6818 the required size. The pattern is not allowed to fail, so the existence
6819 of the pattern indicates support is present. Also require that an
6820 atomic load exists for the required size. */
6821 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6822 return boolean_true_node;
6823 else
6824 return boolean_false_node;
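/* Illustrative sketch, not part of GCC: this folding answers, at
   compile time, questions such as

     __atomic_always_lock_free (sizeof (int), 0);         (true on
                                                            most targets)
     __atomic_always_lock_free (sizeof (int), &s.byte);   (false if the
                                                            member is
                                                            underaligned)

   where a null pointer for the second argument means "assume typical
   alignment for an object of that size"; s.byte is a hypothetical
   char member used only for the example.  */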
6827 /* Return true if the parameters to call EXP represent an object which will
6828 always generate lock free instructions. The first argument represents the
6829 size of the object, and the second parameter is a pointer to the object
6830 itself. If NULL is passed for the object, then the result is based on
6831 typical alignment for an object of the specified size. Otherwise return
6832 false. */
6834 static rtx
6835 expand_builtin_atomic_always_lock_free (tree exp)
6837 tree size;
6838 tree arg0 = CALL_EXPR_ARG (exp, 0);
6839 tree arg1 = CALL_EXPR_ARG (exp, 1);
6841 if (TREE_CODE (arg0) != INTEGER_CST)
6843 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6844 return const0_rtx;
6847 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6848 if (size == boolean_true_node)
6849 return const1_rtx;
6850 return const0_rtx;
6853 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6854 is lock free on this architecture. */
6856 static tree
6857 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6859 if (!flag_inline_atomics)
6860 return NULL_TREE;
6862 /* If it isn't always lock free, don't generate a result. */
6863 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6864 return boolean_true_node;
6866 return NULL_TREE;
6869 /* Return true if the parameters to call EXP represent an object which will
6870 always generate lock free instructions. The first argument represents the
6871 size of the object, and the second parameter is a pointer to the object
6872 itself. If NULL is passed for the object, then the result is based on
6873 typical alignment for an object of the specified size. Otherwise return
6874 NULL_RTX. */
6876 static rtx
6877 expand_builtin_atomic_is_lock_free (tree exp)
6879 tree size;
6880 tree arg0 = CALL_EXPR_ARG (exp, 0);
6881 tree arg1 = CALL_EXPR_ARG (exp, 1);
6883 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6885 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6886 return NULL_RTX;
6889 if (!flag_inline_atomics)
6890 return NULL_RTX;
6892 /* If the value is known at compile time, return the RTX for it. */
6893 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6894 if (size == boolean_true_node)
6895 return const1_rtx;
6897 return NULL_RTX;
6900 /* Expand the __atomic_thread_fence intrinsic:
6901 void __atomic_thread_fence (enum memmodel)
6902 EXP is the CALL_EXPR. */
6904 static void
6905 expand_builtin_atomic_thread_fence (tree exp)
6907 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6908 expand_mem_thread_fence (model);
6911 /* Expand the __atomic_signal_fence intrinsic:
6912 void __atomic_signal_fence (enum memmodel)
6913 EXP is the CALL_EXPR. */
6915 static void
6916 expand_builtin_atomic_signal_fence (tree exp)
6918 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6919 expand_mem_signal_fence (model);
6922 /* Expand the __sync_synchronize intrinsic. */
6924 static void
6925 expand_builtin_sync_synchronize (void)
6927 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6930 static rtx
6931 expand_builtin_thread_pointer (tree exp, rtx target)
6933 enum insn_code icode;
6934 if (!validate_arglist (exp, VOID_TYPE))
6935 return const0_rtx;
6936 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6937 if (icode != CODE_FOR_nothing)
6939 class expand_operand op;
6940 /* If the target is not suitable then create a new target. */
6941 if (target == NULL_RTX
6942 || !REG_P (target)
6943 || GET_MODE (target) != Pmode)
6944 target = gen_reg_rtx (Pmode);
6945 create_output_operand (&op, target, Pmode);
6946 expand_insn (icode, 1, &op);
6947 return target;
6949 error ("%<__builtin_thread_pointer%> is not supported on this target");
6950 return const0_rtx;
6953 static void
6954 expand_builtin_set_thread_pointer (tree exp)
6956 enum insn_code icode;
6957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6958 return;
6959 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6960 if (icode != CODE_FOR_nothing)
6962 class expand_operand op;
6963 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6964 Pmode, EXPAND_NORMAL);
6965 create_input_operand (&op, val, Pmode);
6966 expand_insn (icode, 1, &op);
6967 return;
6969 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6973 /* Emit code to restore the current value of stack. */
6975 static void
6976 expand_stack_restore (tree var)
6978 rtx_insn *prev;
6979 rtx sa = expand_normal (var);
6981 sa = convert_memory_address (Pmode, sa);
6983 prev = get_last_insn ();
6984 emit_stack_restore (SAVE_BLOCK, sa);
6986 record_new_stack_level ();
6988 fixup_args_size_notes (prev, get_last_insn (), 0);
6991 /* Emit code to save the current value of stack. */
6993 static rtx
6994 expand_stack_save (void)
6996 rtx ret = NULL_RTX;
6998 emit_stack_save (SAVE_BLOCK, &ret);
6999 return ret;
7002 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7004 static rtx
7005 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7007 const char *name;
7008 rtx fallback_retval;
7009 rtx_insn *(*gen_fn) (rtx, rtx);
7010 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7012 case BUILT_IN_GOACC_PARLEVEL_ID:
7013 name = "__builtin_goacc_parlevel_id";
7014 fallback_retval = const0_rtx;
7015 gen_fn = targetm.gen_oacc_dim_pos;
7016 break;
7017 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7018 name = "__builtin_goacc_parlevel_size";
7019 fallback_retval = const1_rtx;
7020 gen_fn = targetm.gen_oacc_dim_size;
7021 break;
7022 default:
7023 gcc_unreachable ();
7026 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7028 error ("%qs only supported in OpenACC code", name);
7029 return const0_rtx;
7032 tree arg = CALL_EXPR_ARG (exp, 0);
7033 if (TREE_CODE (arg) != INTEGER_CST)
7035 error ("non-constant argument 0 to %qs", name);
7036 return const0_rtx;
7039 int dim = TREE_INT_CST_LOW (arg);
7040 switch (dim)
7042 case GOMP_DIM_GANG:
7043 case GOMP_DIM_WORKER:
7044 case GOMP_DIM_VECTOR:
7045 break;
7046 default:
7047 error ("illegal argument 0 to %qs", name);
7048 return const0_rtx;
7051 if (ignore)
7052 return target;
7054 if (target == NULL_RTX)
7055 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7057 if (!targetm.have_oacc_dim_size ())
7059 emit_move_insn (target, fallback_retval);
7060 return target;
7063 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7064 emit_insn (gen_fn (reg, GEN_INT (dim)));
7065 if (reg != target)
7066 emit_move_insn (target, reg);
7068 return target;
7071 /* Expand a string compare operation using a sequence of char comparisons
7072 to get rid of the calling overhead, with result going to TARGET if
7073 that's convenient.
7075 VAR_STR is the variable string source;
7076 CONST_STR is the constant string source;
7077 LENGTH is the number of chars to compare;
7078 CONST_STR_N indicates which source string is the constant string;
7079 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7081 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7083 target = (int) (unsigned char) var_str[0]
7084 - (int) (unsigned char) const_str[0];
7085 if (target != 0)
7086 goto ne_label;
7087 ...
7088 target = (int) (unsigned char) var_str[length - 2]
7089 - (int) (unsigned char) const_str[length - 2];
7090 if (target != 0)
7091 goto ne_label;
7092 target = (int) (unsigned char) var_str[length - 1]
7093 - (int) (unsigned char) const_str[length - 1];
7094 ne_label:
7097 static rtx
7098 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7099 unsigned HOST_WIDE_INT length,
7100 int const_str_n, machine_mode mode)
7102 HOST_WIDE_INT offset = 0;
7103 rtx var_rtx_array
7104 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7105 rtx var_rtx = NULL_RTX;
7106 rtx const_rtx = NULL_RTX;
7107 rtx result = target ? target : gen_reg_rtx (mode);
7108 rtx_code_label *ne_label = gen_label_rtx ();
7109 tree unit_type_node = unsigned_char_type_node;
7110 scalar_int_mode unit_mode
7111 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7113 start_sequence ();
7115 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7117 var_rtx
7118 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7119 const_rtx = c_readstr (const_str + offset, unit_mode);
7120 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7121 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7123 op0 = convert_modes (mode, unit_mode, op0, 1);
7124 op1 = convert_modes (mode, unit_mode, op1, 1);
7125 result = expand_simple_binop (mode, MINUS, op0, op1,
7126 result, 1, OPTAB_WIDEN);
7127 if (i < length - 1)
7128 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7129 mode, true, ne_label);
7130 offset += GET_MODE_SIZE (unit_mode);
7133 emit_label (ne_label);
7134 rtx_insn *insns = get_insns ();
7135 end_sequence ();
7136 emit_insn (insns);
7138 return result;
7141 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7142 going to TARGET if that's convenient.
7143 If the call is not inlined, return NULL_RTX. */
7144 static rtx
7145 inline_expand_builtin_string_cmp (tree exp, rtx target)
7147 tree fndecl = get_callee_fndecl (exp);
7148 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7149 unsigned HOST_WIDE_INT length = 0;
7150 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7152 /* Do NOT apply this inlining expansion when optimizing for size or
7153 when the optimization level is below 2. */
7154 if (optimize < 2 || optimize_insn_for_size_p ())
7155 return NULL_RTX;
7157 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7158 || fcode == BUILT_IN_STRNCMP
7159 || fcode == BUILT_IN_MEMCMP);
7161 /* On a target where the type of the call (int) has the same or narrower
7162 precision than unsigned char, give up the inlining expansion. */
7163 if (TYPE_PRECISION (unsigned_char_type_node)
7164 >= TYPE_PRECISION (TREE_TYPE (exp)))
7165 return NULL_RTX;
7167 tree arg1 = CALL_EXPR_ARG (exp, 0);
7168 tree arg2 = CALL_EXPR_ARG (exp, 1);
7169 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7171 unsigned HOST_WIDE_INT len1 = 0;
7172 unsigned HOST_WIDE_INT len2 = 0;
7173 unsigned HOST_WIDE_INT len3 = 0;
7175 const char *src_str1 = c_getstr (arg1, &len1);
7176 const char *src_str2 = c_getstr (arg2, &len2);
7178 /* If neither string is a constant string, the call does not qualify. */
7179 if (!src_str1 && !src_str2)
7180 return NULL_RTX;
7182 /* For strncmp, if the length is not a constant, the call does not qualify. */
7183 if (is_ncmp)
7185 if (!tree_fits_uhwi_p (len3_tree))
7186 return NULL_RTX;
7187 else
7188 len3 = tree_to_uhwi (len3_tree);
7191 if (src_str1 != NULL)
7192 len1 = strnlen (src_str1, len1) + 1;
7194 if (src_str2 != NULL)
7195 len2 = strnlen (src_str2, len2) + 1;
7197 int const_str_n = 0;
7198 if (!len1)
7199 const_str_n = 2;
7200 else if (!len2)
7201 const_str_n = 1;
7202 else if (len2 > len1)
7203 const_str_n = 1;
7204 else
7205 const_str_n = 2;
7207 gcc_checking_assert (const_str_n > 0);
7208 length = (const_str_n == 1) ? len1 : len2;
7210 if (is_ncmp && len3 < length)
7211 length = len3;
7213 /* If the length of the comparison is larger than the threshold,
7214 do nothing. */
7215 if (length > (unsigned HOST_WIDE_INT)
7216 param_builtin_string_cmp_inline_length)
7217 return NULL_RTX;
7219 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7221 /* Now, start the inline expansion of the call. */
7222 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7223 (const_str_n == 1) ? src_str1 : src_str2, length,
7224 const_str_n, mode);
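/* Illustrative example (assuming the default value 3 of the
   --param builtin-string-cmp-inline-length threshold checked above):

     int r = strcmp (s, "hi");

   has a constant string of length 2, so LENGTH is 3 (including the
   terminating NUL) and the call is expanded by inline_string_cmp into
   three byte comparisons, while strcmp (s, t) with two non-constant
   arguments does not qualify and returns NULL_RTX here.  */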
7227 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7228 represents the size of the first argument to that call, or VOIDmode
7229 if the argument is a pointer. IGNORE will be true if the result
7230 isn't used. */
7231 static rtx
7232 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7233 bool ignore)
7235 rtx val, failsafe;
7236 unsigned nargs = call_expr_nargs (exp);
7238 tree arg0 = CALL_EXPR_ARG (exp, 0);
7240 if (mode == VOIDmode)
7242 mode = TYPE_MODE (TREE_TYPE (arg0));
7243 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7246 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7248 /* An optional second argument can be used as a failsafe value on
7249 some machines. If it isn't present, then the failsafe value is
7250 assumed to be 0. */
7251 if (nargs > 1)
7253 tree arg1 = CALL_EXPR_ARG (exp, 1);
7254 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7256 else
7257 failsafe = const0_rtx;
7259 /* If the result isn't used, the behavior is undefined. It would be
7260 nice to emit a warning here, but path splitting means this might
7261 happen with legitimate code. So simply drop the builtin
7262 expansion in that case; we've handled any side-effects above. */
7263 if (ignore)
7264 return const0_rtx;
7266 /* If we don't have a suitable target, create one to hold the result. */
7267 if (target == NULL || GET_MODE (target) != mode)
7268 target = gen_reg_rtx (mode);
7270 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7271 val = convert_modes (mode, VOIDmode, val, false);
7273 return targetm.speculation_safe_value (mode, target, val, failsafe);
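/* Illustrative user-level example of the builtin expanded above
   (ARRAY and LEN are placeholders):

     if (i < len)
       val = array[__builtin_speculation_safe_value (i)];

   On targets implementing the hook, the returned index is forced to
   the failsafe value (0 by default, as set above) on a mis-speculated
   path before the dependent load executes.  */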
7276 /* Expand an expression EXP that calls a built-in function,
7277 with result going to TARGET if that's convenient
7278 (and in mode MODE if that's convenient).
7279 SUBTARGET may be used as the target for computing one of EXP's operands.
7280 IGNORE is nonzero if the value is to be ignored. */
7283 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7284 int ignore)
7286 tree fndecl = get_callee_fndecl (exp);
7287 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7288 int flags;
7290 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7291 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7293 /* When ASan is enabled, we don't want to expand some memory/string
7294 builtins and instead rely on libsanitizer's hooks. This allows us to
7295 avoid redundant checks and be sure that a possible overflow will be
7296 detected by ASan. */
7298 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7299 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7300 return expand_call (exp, target, ignore);
7302 /* When not optimizing, generate calls to library functions for a certain
7303 set of builtins. */
7304 if (!optimize
7305 && !called_as_built_in (fndecl)
7306 && fcode != BUILT_IN_FORK
7307 && fcode != BUILT_IN_EXECL
7308 && fcode != BUILT_IN_EXECV
7309 && fcode != BUILT_IN_EXECLP
7310 && fcode != BUILT_IN_EXECLE
7311 && fcode != BUILT_IN_EXECVP
7312 && fcode != BUILT_IN_EXECVE
7313 && !ALLOCA_FUNCTION_CODE_P (fcode)
7314 && fcode != BUILT_IN_FREE)
7315 return expand_call (exp, target, ignore);
7317 /* The built-in function expanders test for target == const0_rtx
7318 to determine whether the function's result will be ignored. */
7319 if (ignore)
7320 target = const0_rtx;
7322 /* If the result of a pure or const built-in function is ignored, and
7323 none of its arguments are volatile, we can avoid expanding the
7324 built-in call and just evaluate the arguments for side-effects. */
7325 if (target == const0_rtx
7326 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7327 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7329 bool volatilep = false;
7330 tree arg;
7331 call_expr_arg_iterator iter;
7333 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7334 if (TREE_THIS_VOLATILE (arg))
7336 volatilep = true;
7337 break;
7340 if (! volatilep)
7342 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7343 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7344 return const0_rtx;
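/* Worked example of the early-out above (illustrative; F is a
   placeholder function with side effects):

     (void) __builtin_labs (f ());

   The const builtin call itself is dropped and only the evaluation
   of f () is expanded.  */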
7348 switch (fcode)
7350 CASE_FLT_FN (BUILT_IN_FABS):
7351 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7352 case BUILT_IN_FABSD32:
7353 case BUILT_IN_FABSD64:
7354 case BUILT_IN_FABSD128:
7355 target = expand_builtin_fabs (exp, target, subtarget);
7356 if (target)
7357 return target;
7358 break;
7360 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7361 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7362 target = expand_builtin_copysign (exp, target, subtarget);
7363 if (target)
7364 return target;
7365 break;
7367 /* Just do a normal library call if we were unable to fold
7368 the values. */
7369 CASE_FLT_FN (BUILT_IN_CABS):
7370 break;
7372 CASE_FLT_FN (BUILT_IN_FMA):
7373 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7374 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7375 if (target)
7376 return target;
7377 break;
7379 CASE_FLT_FN (BUILT_IN_ILOGB):
7380 if (! flag_unsafe_math_optimizations)
7381 break;
7382 gcc_fallthrough ();
7383 CASE_FLT_FN (BUILT_IN_ISINF):
7384 CASE_FLT_FN (BUILT_IN_FINITE):
7385 case BUILT_IN_ISFINITE:
7386 case BUILT_IN_ISNORMAL:
7387 target = expand_builtin_interclass_mathfn (exp, target);
7388 if (target)
7389 return target;
7390 break;
7392 CASE_FLT_FN (BUILT_IN_ICEIL):
7393 CASE_FLT_FN (BUILT_IN_LCEIL):
7394 CASE_FLT_FN (BUILT_IN_LLCEIL):
7395 CASE_FLT_FN (BUILT_IN_LFLOOR):
7396 CASE_FLT_FN (BUILT_IN_IFLOOR):
7397 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7398 target = expand_builtin_int_roundingfn (exp, target);
7399 if (target)
7400 return target;
7401 break;
7403 CASE_FLT_FN (BUILT_IN_IRINT):
7404 CASE_FLT_FN (BUILT_IN_LRINT):
7405 CASE_FLT_FN (BUILT_IN_LLRINT):
7406 CASE_FLT_FN (BUILT_IN_IROUND):
7407 CASE_FLT_FN (BUILT_IN_LROUND):
7408 CASE_FLT_FN (BUILT_IN_LLROUND):
7409 target = expand_builtin_int_roundingfn_2 (exp, target);
7410 if (target)
7411 return target;
7412 break;
7414 CASE_FLT_FN (BUILT_IN_POWI):
7415 target = expand_builtin_powi (exp, target);
7416 if (target)
7417 return target;
7418 break;
7420 CASE_FLT_FN (BUILT_IN_CEXPI):
7421 target = expand_builtin_cexpi (exp, target);
7422 gcc_assert (target);
7423 return target;
7425 CASE_FLT_FN (BUILT_IN_SIN):
7426 CASE_FLT_FN (BUILT_IN_COS):
7427 if (! flag_unsafe_math_optimizations)
7428 break;
7429 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7430 if (target)
7431 return target;
7432 break;
7434 CASE_FLT_FN (BUILT_IN_SINCOS):
7435 if (! flag_unsafe_math_optimizations)
7436 break;
7437 target = expand_builtin_sincos (exp);
7438 if (target)
7439 return target;
7440 break;
7442 case BUILT_IN_APPLY_ARGS:
7443 return expand_builtin_apply_args ();
7445 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7446 FUNCTION with a copy of the parameters described by
7447 ARGUMENTS, and ARGSIZE. It returns a block of memory
7448 allocated on the stack into which are stored all the registers
7449 that might possibly be used for returning the result of a
7450 function. ARGUMENTS is the value returned by
7451 __builtin_apply_args. ARGSIZE is the number of bytes of
7452 arguments that must be copied. ??? How should this value be
7453 computed? We'll also need a safe worst case value for varargs
7454 functions. */
7455 case BUILT_IN_APPLY:
7456 if (!validate_arglist (exp, POINTER_TYPE,
7457 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7458 && !validate_arglist (exp, REFERENCE_TYPE,
7459 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7460 return const0_rtx;
7461 else
7463 rtx ops[3];
7465 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7466 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7467 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7469 return expand_builtin_apply (ops[0], ops[1], ops[2]);
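/* Illustrative use of the apply machinery (a sketch; TARGET_FN and
   the worst-case argument-block size of 64 bytes are placeholders
   chosen by the wrapper's author):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   forwards the current function's arguments to TARGET_FN and returns
   whatever it returned.  */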
7472 /* __builtin_return (RESULT) causes the function to return the
7473 value described by RESULT. RESULT is address of the block of
7474 memory returned by __builtin_apply. */
7475 case BUILT_IN_RETURN:
7476 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7477 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7478 return const0_rtx;
7480 case BUILT_IN_SAVEREGS:
7481 return expand_builtin_saveregs ();
7483 case BUILT_IN_VA_ARG_PACK:
7484 /* All valid uses of __builtin_va_arg_pack () are removed during
7485 inlining. */
7486 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7487 return const0_rtx;
7489 case BUILT_IN_VA_ARG_PACK_LEN:
7490 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7491 inlining. */
7492 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7493 return const0_rtx;
7495 /* Return the address of the first anonymous stack arg. */
7496 case BUILT_IN_NEXT_ARG:
7497 if (fold_builtin_next_arg (exp, false))
7498 return const0_rtx;
7499 return expand_builtin_next_arg ();
7501 case BUILT_IN_CLEAR_CACHE:
7502 target = expand_builtin___clear_cache (exp);
7503 if (target)
7504 return target;
7505 break;
7507 case BUILT_IN_CLASSIFY_TYPE:
7508 return expand_builtin_classify_type (exp);
7510 case BUILT_IN_CONSTANT_P:
7511 return const0_rtx;
7513 case BUILT_IN_FRAME_ADDRESS:
7514 case BUILT_IN_RETURN_ADDRESS:
7515 return expand_builtin_frame_address (fndecl, exp);
7517 /* Returns the address of the area where the structure is returned.
7518 0 otherwise. */
7519 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7520 if (call_expr_nargs (exp) != 0
7521 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7522 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7523 return const0_rtx;
7524 else
7525 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7527 CASE_BUILT_IN_ALLOCA:
7528 target = expand_builtin_alloca (exp);
7529 if (target)
7530 return target;
7531 break;
7533 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7534 return expand_asan_emit_allocas_unpoison (exp);
7536 case BUILT_IN_STACK_SAVE:
7537 return expand_stack_save ();
7539 case BUILT_IN_STACK_RESTORE:
7540 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7541 return const0_rtx;
7543 case BUILT_IN_BSWAP16:
7544 case BUILT_IN_BSWAP32:
7545 case BUILT_IN_BSWAP64:
7546 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7547 if (target)
7548 return target;
7549 break;
7551 CASE_INT_FN (BUILT_IN_FFS):
7552 target = expand_builtin_unop (target_mode, exp, target,
7553 subtarget, ffs_optab);
7554 if (target)
7555 return target;
7556 break;
7558 CASE_INT_FN (BUILT_IN_CLZ):
7559 target = expand_builtin_unop (target_mode, exp, target,
7560 subtarget, clz_optab);
7561 if (target)
7562 return target;
7563 break;
7565 CASE_INT_FN (BUILT_IN_CTZ):
7566 target = expand_builtin_unop (target_mode, exp, target,
7567 subtarget, ctz_optab);
7568 if (target)
7569 return target;
7570 break;
7572 CASE_INT_FN (BUILT_IN_CLRSB):
7573 target = expand_builtin_unop (target_mode, exp, target,
7574 subtarget, clrsb_optab);
7575 if (target)
7576 return target;
7577 break;
7579 CASE_INT_FN (BUILT_IN_POPCOUNT):
7580 target = expand_builtin_unop (target_mode, exp, target,
7581 subtarget, popcount_optab);
7582 if (target)
7583 return target;
7584 break;
7586 CASE_INT_FN (BUILT_IN_PARITY):
7587 target = expand_builtin_unop (target_mode, exp, target,
7588 subtarget, parity_optab);
7589 if (target)
7590 return target;
7591 break;
7593 case BUILT_IN_STRLEN:
7594 target = expand_builtin_strlen (exp, target, target_mode);
7595 if (target)
7596 return target;
7597 break;
7599 case BUILT_IN_STRNLEN:
7600 target = expand_builtin_strnlen (exp, target, target_mode);
7601 if (target)
7602 return target;
7603 break;
7605 case BUILT_IN_STRCAT:
7606 target = expand_builtin_strcat (exp, target);
7607 if (target)
7608 return target;
7609 break;
7611 case BUILT_IN_STRCPY:
7612 target = expand_builtin_strcpy (exp, target);
7613 if (target)
7614 return target;
7615 break;
7617 case BUILT_IN_STRNCAT:
7618 target = expand_builtin_strncat (exp, target);
7619 if (target)
7620 return target;
7621 break;
7623 case BUILT_IN_STRNCPY:
7624 target = expand_builtin_strncpy (exp, target);
7625 if (target)
7626 return target;
7627 break;
7629 case BUILT_IN_STPCPY:
7630 target = expand_builtin_stpcpy (exp, target, mode);
7631 if (target)
7632 return target;
7633 break;
7635 case BUILT_IN_STPNCPY:
7636 target = expand_builtin_stpncpy (exp, target);
7637 if (target)
7638 return target;
7639 break;
7641 case BUILT_IN_MEMCHR:
7642 target = expand_builtin_memchr (exp, target);
7643 if (target)
7644 return target;
7645 break;
7647 case BUILT_IN_MEMCPY:
7648 target = expand_builtin_memcpy (exp, target);
7649 if (target)
7650 return target;
7651 break;
7653 case BUILT_IN_MEMMOVE:
7654 target = expand_builtin_memmove (exp, target);
7655 if (target)
7656 return target;
7657 break;
7659 case BUILT_IN_MEMPCPY:
7660 target = expand_builtin_mempcpy (exp, target);
7661 if (target)
7662 return target;
7663 break;
7665 case BUILT_IN_MEMSET:
7666 target = expand_builtin_memset (exp, target, mode);
7667 if (target)
7668 return target;
7669 break;
7671 case BUILT_IN_BZERO:
7672 target = expand_builtin_bzero (exp);
7673 if (target)
7674 return target;
7675 break;
7677 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7678 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7679 when changing it to a strcmp call. */
7680 case BUILT_IN_STRCMP_EQ:
7681 target = expand_builtin_memcmp (exp, target, true);
7682 if (target)
7683 return target;
7685 /* Change this call back to a BUILT_IN_STRCMP. */
7686 TREE_OPERAND (exp, 1)
7687 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7689 /* Delete the last parameter. */
7690 unsigned int i;
7691 vec<tree, va_gc> *arg_vec;
7692 vec_alloc (arg_vec, 2);
7693 for (i = 0; i < 2; i++)
7694 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7695 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7696 /* FALLTHROUGH */
7698 case BUILT_IN_STRCMP:
7699 target = expand_builtin_strcmp (exp, target);
7700 if (target)
7701 return target;
7702 break;
7704 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7705 back to a BUILT_IN_STRNCMP. */
7706 case BUILT_IN_STRNCMP_EQ:
7707 target = expand_builtin_memcmp (exp, target, true);
7708 if (target)
7709 return target;
7711 /* Change it back to a BUILT_IN_STRNCMP. */
7712 TREE_OPERAND (exp, 1)
7713 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7714 /* FALLTHROUGH */
7716 case BUILT_IN_STRNCMP:
7717 target = expand_builtin_strncmp (exp, target, mode);
7718 if (target)
7719 return target;
7720 break;
7722 case BUILT_IN_BCMP:
7723 case BUILT_IN_MEMCMP:
7724 case BUILT_IN_MEMCMP_EQ:
7725 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7726 if (target)
7727 return target;
7728 if (fcode == BUILT_IN_MEMCMP_EQ)
7730 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7731 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7733 break;
7735 case BUILT_IN_SETJMP:
7736 /* This should have been lowered to the builtins below. */
7737 gcc_unreachable ();
7739 case BUILT_IN_SETJMP_SETUP:
7740 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7741 and the receiver label. */
7742 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7744 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7745 VOIDmode, EXPAND_NORMAL);
7746 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7747 rtx_insn *label_r = label_rtx (label);
7749 /* This is copied from the handling of non-local gotos. */
7750 expand_builtin_setjmp_setup (buf_addr, label_r);
7751 nonlocal_goto_handler_labels
7752 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7753 nonlocal_goto_handler_labels);
7754 /* ??? Do not let expand_label treat us as such since we would
7755 not want to be both on the list of non-local labels and on
7756 the list of forced labels. */
7757 FORCED_LABEL (label) = 0;
7758 return const0_rtx;
7760 break;
7762 case BUILT_IN_SETJMP_RECEIVER:
7763 /* __builtin_setjmp_receiver is passed the receiver label. */
7764 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7766 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7767 rtx_insn *label_r = label_rtx (label);
7769 expand_builtin_setjmp_receiver (label_r);
7770 return const0_rtx;
7772 break;
7774 /* __builtin_longjmp is passed a pointer to an array of five words.
7775 It's similar to the C library longjmp function but works with
7776 __builtin_setjmp above. */
7777 case BUILT_IN_LONGJMP:
7778 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7780 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7781 VOIDmode, EXPAND_NORMAL);
7782 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7784 if (value != const1_rtx)
7786 error ("%<__builtin_longjmp%> second argument must be 1");
7787 return const0_rtx;
7790 expand_builtin_longjmp (buf_addr, value);
7791 return const0_rtx;
7793 break;
7795 case BUILT_IN_NONLOCAL_GOTO:
7796 target = expand_builtin_nonlocal_goto (exp);
7797 if (target)
7798 return target;
7799 break;
7801 /* This updates the setjmp buffer that is its argument with the value
7802 of the current stack pointer. */
7803 case BUILT_IN_UPDATE_SETJMP_BUF:
7804 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7806 rtx buf_addr
7807 = expand_normal (CALL_EXPR_ARG (exp, 0));
7809 expand_builtin_update_setjmp_buf (buf_addr);
7810 return const0_rtx;
7812 break;
7814 case BUILT_IN_TRAP:
7815 expand_builtin_trap ();
7816 return const0_rtx;
7818 case BUILT_IN_UNREACHABLE:
7819 expand_builtin_unreachable ();
7820 return const0_rtx;
7822 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7823 case BUILT_IN_SIGNBITD32:
7824 case BUILT_IN_SIGNBITD64:
7825 case BUILT_IN_SIGNBITD128:
7826 target = expand_builtin_signbit (exp, target);
7827 if (target)
7828 return target;
7829 break;
7831 /* Various hooks for the DWARF 2 __throw routine. */
7832 case BUILT_IN_UNWIND_INIT:
7833 expand_builtin_unwind_init ();
7834 return const0_rtx;
7835 case BUILT_IN_DWARF_CFA:
7836 return virtual_cfa_rtx;
7837 #ifdef DWARF2_UNWIND_INFO
7838 case BUILT_IN_DWARF_SP_COLUMN:
7839 return expand_builtin_dwarf_sp_column ();
7840 case BUILT_IN_INIT_DWARF_REG_SIZES:
7841 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7842 return const0_rtx;
7843 #endif
7844 case BUILT_IN_FROB_RETURN_ADDR:
7845 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7846 case BUILT_IN_EXTRACT_RETURN_ADDR:
7847 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7848 case BUILT_IN_EH_RETURN:
7849 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7850 CALL_EXPR_ARG (exp, 1));
7851 return const0_rtx;
7852 case BUILT_IN_EH_RETURN_DATA_REGNO:
7853 return expand_builtin_eh_return_data_regno (exp);
7854 case BUILT_IN_EXTEND_POINTER:
7855 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7856 case BUILT_IN_EH_POINTER:
7857 return expand_builtin_eh_pointer (exp);
7858 case BUILT_IN_EH_FILTER:
7859 return expand_builtin_eh_filter (exp);
7860 case BUILT_IN_EH_COPY_VALUES:
7861 return expand_builtin_eh_copy_values (exp);
7863 case BUILT_IN_VA_START:
7864 return expand_builtin_va_start (exp);
7865 case BUILT_IN_VA_END:
7866 return expand_builtin_va_end (exp);
7867 case BUILT_IN_VA_COPY:
7868 return expand_builtin_va_copy (exp);
7869 case BUILT_IN_EXPECT:
7870 return expand_builtin_expect (exp, target);
7871 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7872 return expand_builtin_expect_with_probability (exp, target);
7873 case BUILT_IN_ASSUME_ALIGNED:
7874 return expand_builtin_assume_aligned (exp, target);
7875 case BUILT_IN_PREFETCH:
7876 expand_builtin_prefetch (exp);
7877 return const0_rtx;
7879 case BUILT_IN_INIT_TRAMPOLINE:
7880 return expand_builtin_init_trampoline (exp, true);
7881 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7882 return expand_builtin_init_trampoline (exp, false);
7883 case BUILT_IN_ADJUST_TRAMPOLINE:
7884 return expand_builtin_adjust_trampoline (exp);
7886 case BUILT_IN_INIT_DESCRIPTOR:
7887 return expand_builtin_init_descriptor (exp);
7888 case BUILT_IN_ADJUST_DESCRIPTOR:
7889 return expand_builtin_adjust_descriptor (exp);
7891 case BUILT_IN_FORK:
7892 case BUILT_IN_EXECL:
7893 case BUILT_IN_EXECV:
7894 case BUILT_IN_EXECLP:
7895 case BUILT_IN_EXECLE:
7896 case BUILT_IN_EXECVP:
7897 case BUILT_IN_EXECVE:
7898 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7899 if (target)
7900 return target;
7901 break;
7903 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7904 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7905 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7906 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7907 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7908 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7909 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7910 if (target)
7911 return target;
7912 break;
7914 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7915 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7916 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7917 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7918 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7919 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7920 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7921 if (target)
7922 return target;
7923 break;
7925 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7926 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7927 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7928 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7929 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7930 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7931 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7932 if (target)
7933 return target;
7934 break;
7936 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7937 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7938 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7939 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7940 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7942 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7943 if (target)
7944 return target;
7945 break;
7947 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7948 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7949 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7950 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7951 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7952 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7953 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7954 if (target)
7955 return target;
7956 break;
7958 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7959 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7960 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7961 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7962 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7963 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7964 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7965 if (target)
7966 return target;
7967 break;
7969 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7970 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7971 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7972 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7973 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7974 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7975 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7976 if (target)
7977 return target;
7978 break;
7980 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7981 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7982 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7983 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7984 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7985 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7986 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7987 if (target)
7988 return target;
7989 break;
7991 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7992 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7993 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7994 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7995 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7996 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7997 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7998 if (target)
7999 return target;
8000 break;
8002 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8003 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8004 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8005 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8006 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8007 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8008 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8009 if (target)
8010 return target;
8011 break;
8013 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8014 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8015 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8016 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8017 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8018 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8019 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8020 if (target)
8021 return target;
8022 break;
8024 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8025 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8026 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8027 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8028 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8029 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8030 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8031 if (target)
8032 return target;
8033 break;
8035 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8036 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8037 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8038 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8039 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8040 if (mode == VOIDmode)
8041 mode = TYPE_MODE (boolean_type_node);
8042 if (!target || !register_operand (target, mode))
8043 target = gen_reg_rtx (mode);
8045 mode = get_builtin_sync_mode
8046 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8047 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8048 if (target)
8049 return target;
8050 break;
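/* Illustrative example of the boolean form handled above (LOCK is a
   placeholder):

     static int lock;
     int acquired = __sync_bool_compare_and_swap (&lock, 0, 1);

   The boolean result is why TARGET is fixed up against
   boolean_type_node's mode before MODE is recomputed from FCODE.  */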
8052 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8053 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8054 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8055 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8056 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8057 mode = get_builtin_sync_mode
8058 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8059 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8060 if (target)
8061 return target;
8062 break;
8064 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8065 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8066 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8067 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8068 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8069 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8070 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8071 if (target)
8072 return target;
8073 break;
8075 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8076 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8077 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8078 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8079 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8080 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8081 expand_builtin_sync_lock_release (mode, exp);
8082 return const0_rtx;
8084 case BUILT_IN_SYNC_SYNCHRONIZE:
8085 expand_builtin_sync_synchronize ();
8086 return const0_rtx;
8088 case BUILT_IN_ATOMIC_EXCHANGE_1:
8089 case BUILT_IN_ATOMIC_EXCHANGE_2:
8090 case BUILT_IN_ATOMIC_EXCHANGE_4:
8091 case BUILT_IN_ATOMIC_EXCHANGE_8:
8092 case BUILT_IN_ATOMIC_EXCHANGE_16:
8093 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8094 target = expand_builtin_atomic_exchange (mode, exp, target);
8095 if (target)
8096 return target;
8097 break;
8099 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8100 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8101 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8102 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8103 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8105 unsigned int nargs, z;
8106 vec<tree, va_gc> *vec;
8108 mode =
8109 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8110 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8111 if (target)
8112 return target;
8114 /* If this is turned into an external library call, the weak parameter
8115 must be dropped to match the expected parameter list. */
8116 nargs = call_expr_nargs (exp);
8117 vec_alloc (vec, nargs - 1);
8118 for (z = 0; z < 3; z++)
8119 vec->quick_push (CALL_EXPR_ARG (exp, z));
8120 /* Skip the boolean weak parameter. */
8121 for (z = 4; z < 6; z++)
8122 vec->quick_push (CALL_EXPR_ARG (exp, z));
8123 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8124 break;
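/* Illustrative note on the argument juggling above: the builtin form

     __atomic_compare_exchange_n (ptr, &expected, desired,
                                  weak, success_mo, failure_mo);

   carries six arguments, while the out-of-line
   __atomic_compare_exchange_N library functions take only five, so
   the boolean WEAK argument (index 3) is dropped before falling back
   to a library call.  */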
8127 case BUILT_IN_ATOMIC_LOAD_1:
8128 case BUILT_IN_ATOMIC_LOAD_2:
8129 case BUILT_IN_ATOMIC_LOAD_4:
8130 case BUILT_IN_ATOMIC_LOAD_8:
8131 case BUILT_IN_ATOMIC_LOAD_16:
8132 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8133 target = expand_builtin_atomic_load (mode, exp, target);
8134 if (target)
8135 return target;
8136 break;
8138 case BUILT_IN_ATOMIC_STORE_1:
8139 case BUILT_IN_ATOMIC_STORE_2:
8140 case BUILT_IN_ATOMIC_STORE_4:
8141 case BUILT_IN_ATOMIC_STORE_8:
8142 case BUILT_IN_ATOMIC_STORE_16:
8143 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8144 target = expand_builtin_atomic_store (mode, exp);
8145 if (target)
8146 return const0_rtx;
8147 break;
8149 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8150 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8151 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8152 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8153 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8155 enum built_in_function lib;
8156 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8157 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8158 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8159 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8160 ignore, lib);
8161 if (target)
8162 return target;
8163 break;
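/* Illustrative identity behind the LIB fallback above: when
   __atomic_add_fetch_N cannot be expanded directly, it can be
   recovered from its fetch-and-op sibling, conceptually

     __atomic_add_fetch (p, v, mo) == __atomic_fetch_add (p, v, mo) + v

   and similarly for the other op-and-fetch cases below.  */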
8165 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8166 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8167 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8168 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8169 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8171 enum built_in_function lib;
8172 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8173 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8174 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8175 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8176 ignore, lib);
8177 if (target)
8178 return target;
8179 break;
8181 case BUILT_IN_ATOMIC_AND_FETCH_1:
8182 case BUILT_IN_ATOMIC_AND_FETCH_2:
8183 case BUILT_IN_ATOMIC_AND_FETCH_4:
8184 case BUILT_IN_ATOMIC_AND_FETCH_8:
8185 case BUILT_IN_ATOMIC_AND_FETCH_16:
8187 enum built_in_function lib;
8188 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8189 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8190 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8191 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8192 ignore, lib);
8193 if (target)
8194 return target;
8195 break;
8197 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8198 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8199 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8200 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8201 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8203 enum built_in_function lib;
8204 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8205 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8206 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8207 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8208 ignore, lib);
8209 if (target)
8210 return target;
8211 break;
8213 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8214 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8215 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8216 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8217 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8219 enum built_in_function lib;
8220 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8221 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8222 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8223 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8224 ignore, lib);
8225 if (target)
8226 return target;
8227 break;
8229 case BUILT_IN_ATOMIC_OR_FETCH_1:
8230 case BUILT_IN_ATOMIC_OR_FETCH_2:
8231 case BUILT_IN_ATOMIC_OR_FETCH_4:
8232 case BUILT_IN_ATOMIC_OR_FETCH_8:
8233 case BUILT_IN_ATOMIC_OR_FETCH_16:
8235 enum built_in_function lib;
8236 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8237 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8238 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8239 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8240 ignore, lib);
8241 if (target)
8242 return target;
8243 break;
8245 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8246 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8247 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8248 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8249 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8250 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8251 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8252 ignore, BUILT_IN_NONE);
8253 if (target)
8254 return target;
8255 break;
8257 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8258 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8259 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8260 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8261 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8262 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8263 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8264 ignore, BUILT_IN_NONE);
8265 if (target)
8266 return target;
8267 break;
8269 case BUILT_IN_ATOMIC_FETCH_AND_1:
8270 case BUILT_IN_ATOMIC_FETCH_AND_2:
8271 case BUILT_IN_ATOMIC_FETCH_AND_4:
8272 case BUILT_IN_ATOMIC_FETCH_AND_8:
8273 case BUILT_IN_ATOMIC_FETCH_AND_16:
8274 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8275 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8276 ignore, BUILT_IN_NONE);
8277 if (target)
8278 return target;
8279 break;
8281 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8282 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8283 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8284 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8285 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8286 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8287 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8288 ignore, BUILT_IN_NONE);
8289 if (target)
8290 return target;
8291 break;
8293 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8294 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8295 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8296 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8297 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8298 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8299 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8300 ignore, BUILT_IN_NONE);
8301 if (target)
8302 return target;
8303 break;
8305 case BUILT_IN_ATOMIC_FETCH_OR_1:
8306 case BUILT_IN_ATOMIC_FETCH_OR_2:
8307 case BUILT_IN_ATOMIC_FETCH_OR_4:
8308 case BUILT_IN_ATOMIC_FETCH_OR_8:
8309 case BUILT_IN_ATOMIC_FETCH_OR_16:
8310 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8311 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8312 ignore, BUILT_IN_NONE);
8313 if (target)
8314 return target;
8315 break;
8317 case BUILT_IN_ATOMIC_TEST_AND_SET:
8318 return expand_builtin_atomic_test_and_set (exp, target);
8320 case BUILT_IN_ATOMIC_CLEAR:
8321 return expand_builtin_atomic_clear (exp);
8323 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8324 return expand_builtin_atomic_always_lock_free (exp);
8326 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8327 target = expand_builtin_atomic_is_lock_free (exp);
8328 if (target)
8329 return target;
8330 break;
8332 case BUILT_IN_ATOMIC_THREAD_FENCE:
8333 expand_builtin_atomic_thread_fence (exp);
8334 return const0_rtx;
8336 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8337 expand_builtin_atomic_signal_fence (exp);
8338 return const0_rtx;
8340 case BUILT_IN_OBJECT_SIZE:
8341 return expand_builtin_object_size (exp);
8343 case BUILT_IN_MEMCPY_CHK:
8344 case BUILT_IN_MEMPCPY_CHK:
8345 case BUILT_IN_MEMMOVE_CHK:
8346 case BUILT_IN_MEMSET_CHK:
8347 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8348 if (target)
8349 return target;
8350 break;
8352 case BUILT_IN_STRCPY_CHK:
8353 case BUILT_IN_STPCPY_CHK:
8354 case BUILT_IN_STRNCPY_CHK:
8355 case BUILT_IN_STPNCPY_CHK:
8356 case BUILT_IN_STRCAT_CHK:
8357 case BUILT_IN_STRNCAT_CHK:
8358 case BUILT_IN_SNPRINTF_CHK:
8359 case BUILT_IN_VSNPRINTF_CHK:
8360 maybe_emit_chk_warning (exp, fcode);
8361 break;
8363 case BUILT_IN_SPRINTF_CHK:
8364 case BUILT_IN_VSPRINTF_CHK:
8365 maybe_emit_sprintf_chk_warning (exp, fcode);
8366 break;
8368 case BUILT_IN_FREE:
8369 if (warn_free_nonheap_object)
8370 maybe_emit_free_warning (exp);
8371 break;
8373 case BUILT_IN_THREAD_POINTER:
8374 return expand_builtin_thread_pointer (exp, target);
8376 case BUILT_IN_SET_THREAD_POINTER:
8377 expand_builtin_set_thread_pointer (exp);
8378 return const0_rtx;
8380 case BUILT_IN_ACC_ON_DEVICE:
8381 /* Do a library call if we failed to expand the builtin when
8382 folding. */
8383 break;
8385 case BUILT_IN_GOACC_PARLEVEL_ID:
8386 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8387 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8389 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8390 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8392 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8393 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8394 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8395 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8396 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8398 return expand_speculation_safe_value (mode, exp, target, ignore);
8400 default: /* just do a library call, if unknown builtin */
8401 break;
8404 /* The switch statement above can drop through to cause the function
8405 to be called normally. */
8406 return expand_call (exp, target, ignore);
8409 /* Determine whether a tree node represents a call to a built-in
8410 function. If the tree T is a call to a built-in function with
8411 the right number of arguments of the appropriate types, return
8412 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8413 Otherwise the return value is END_BUILTINS. */
8415 enum built_in_function
8416 builtin_mathfn_code (const_tree t)
8418 const_tree fndecl, arg, parmlist;
8419 const_tree argtype, parmtype;
8420 const_call_expr_arg_iterator iter;
8422 if (TREE_CODE (t) != CALL_EXPR)
8423 return END_BUILTINS;
8425 fndecl = get_callee_fndecl (t);
8426 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8427 return END_BUILTINS;
8429 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8430 init_const_call_expr_arg_iterator (t, &iter);
8431 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8433 /* If a function doesn't take a variable number of arguments,
8434 the last element in the list will have type `void'. */
8435 parmtype = TREE_VALUE (parmlist);
8436 if (VOID_TYPE_P (parmtype))
8438 if (more_const_call_expr_args_p (&iter))
8439 return END_BUILTINS;
8440 return DECL_FUNCTION_CODE (fndecl);
8443 if (! more_const_call_expr_args_p (&iter))
8444 return END_BUILTINS;
8446 arg = next_const_call_expr_arg (&iter);
8447 argtype = TREE_TYPE (arg);
8449 if (SCALAR_FLOAT_TYPE_P (parmtype))
8451 if (! SCALAR_FLOAT_TYPE_P (argtype))
8452 return END_BUILTINS;
8454 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8456 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8457 return END_BUILTINS;
8459 else if (POINTER_TYPE_P (parmtype))
8461 if (! POINTER_TYPE_P (argtype))
8462 return END_BUILTINS;
8464 else if (INTEGRAL_TYPE_P (parmtype))
8466 if (! INTEGRAL_TYPE_P (argtype))
8467 return END_BUILTINS;
8469 else
8470 return END_BUILTINS;
8473 /* Variable-length argument list. */
8474 return DECL_FUNCTION_CODE (fndecl);
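/* Illustrative example: for a call tree of sqrt (x) with a double
   argument, builtin_mathfn_code returns BUILT_IN_SQRT; a call whose
   argument types do not match the parameter list (say, a pointer
   passed to sqrt) returns END_BUILTINS.  */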
8477 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8478 evaluate to a constant. */
8480 static tree
8481 fold_builtin_constant_p (tree arg)
8483 /* We return 1 for a numeric type that's known to be a constant
8484 value at compile-time or for an aggregate type that's a
8485 literal constant. */
8486 STRIP_NOPS (arg);
8488 /* If we know this is a constant, return the constant 1. */
8489 if (CONSTANT_CLASS_P (arg)
8490 || (TREE_CODE (arg) == CONSTRUCTOR
8491 && TREE_CONSTANT (arg)))
8492 return integer_one_node;
8493 if (TREE_CODE (arg) == ADDR_EXPR)
8495 tree op = TREE_OPERAND (arg, 0);
8496 if (TREE_CODE (op) == STRING_CST
8497 || (TREE_CODE (op) == ARRAY_REF
8498 && integer_zerop (TREE_OPERAND (op, 1))
8499 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8500 return integer_one_node;
8503 /* If this expression has side effects, show we don't know it to be a
8504 constant. Likewise if it's a pointer or aggregate type since in
8505 those cases we only want literals, since those are only optimized
8506 when generating RTL, not later.
8507 And finally, if we are compiling an initializer, not code, we
8508 need to return a definite result now; there's not going to be any
8509 more optimization done. */
8510 if (TREE_SIDE_EFFECTS (arg)
8511 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8512 || POINTER_TYPE_P (TREE_TYPE (arg))
8513 || cfun == 0
8514 || folding_initializer
8515 || force_folding_builtin_constant_p)
8516 return integer_zero_node;
8518 return NULL_TREE;
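/* Illustrative examples (SOME_GLOBAL is a placeholder):
   __builtin_constant_p (3 * 7) and __builtin_constant_p ("x") fold to
   1 immediately, while __builtin_constant_p (some_global) folds to 0
   only once no further optimization could change the answer, e.g. in
   an initializer or when force_folding_builtin_constant_p is set.  */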
8521 /* Create builtin_expect or builtin_expect_with_probability
8522 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8523 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8524 argument; builtin_expect_with_probability instead uses the third argument
8525 as a PROBABILITY value. */
8527 static tree
8528 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8529 tree predictor, tree probability)
8531 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8533 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8534 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8535 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8536 ret_type = TREE_TYPE (TREE_TYPE (fn));
8537 pred_type = TREE_VALUE (arg_types);
8538 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8540 pred = fold_convert_loc (loc, pred_type, pred);
8541 expected = fold_convert_loc (loc, expected_type, expected);
8543 if (probability)
8544 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8545 else
8546 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8547 predictor);
8549 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8550 build_int_cst (ret_type, 0));
8553 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8554 NULL_TREE if no simplification is possible. */
8556 tree
8557 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8558 tree arg3)
8560 tree inner, fndecl, inner_arg0;
8561 enum tree_code code;
8563 /* Distribute the expected value over short-circuiting operators.
8564 See through the cast from truthvalue_type_node to long. */
8565 inner_arg0 = arg0;
8566 while (CONVERT_EXPR_P (inner_arg0)
8567 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8568 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8569 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8571 /* If this is a builtin_expect within a builtin_expect, keep the
8572 inner one. See through a comparison against a constant. It
8573 might have been added to create a truthvalue. */
8574 inner = inner_arg0;
8576 if (COMPARISON_CLASS_P (inner)
8577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8578 inner = TREE_OPERAND (inner, 0);
8580 if (TREE_CODE (inner) == CALL_EXPR
8581 && (fndecl = get_callee_fndecl (inner))
8582 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8583 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8584 return arg0;
8586 inner = inner_arg0;
8587 code = TREE_CODE (inner);
8588 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8590 tree op0 = TREE_OPERAND (inner, 0);
8591 tree op1 = TREE_OPERAND (inner, 1);
8592 arg1 = save_expr (arg1);
8594 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8595 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8596 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8598 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8601 /* If the argument isn't invariant then there's nothing else we can do. */
8602 if (!TREE_CONSTANT (inner_arg0))
8603 return NULL_TREE;
8605 /* If we expect that a comparison against the argument will fold to
8606 a constant return the constant. In practice, this means a true
8607 constant or the address of a non-weak symbol. */
8608 inner = inner_arg0;
8609 STRIP_NOPS (inner);
8610 if (TREE_CODE (inner) == ADDR_EXPR)
8613 do
8614 inner = TREE_OPERAND (inner, 0);
8616 while (TREE_CODE (inner) == COMPONENT_REF
8617 || TREE_CODE (inner) == ARRAY_REF);
8618 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8619 return NULL_TREE;
8622 /* Otherwise, ARG0 already has the proper type for the return value. */
8623 return arg0;
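/* Illustrative example of the distribution above: conceptually,

     __builtin_expect (a && b, 1)

   is rewritten as

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each arm of the short-circuit carries the expectation.  */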
8626 /* Fold a call to __builtin_classify_type with argument ARG. */
8628 static tree
8629 fold_builtin_classify_type (tree arg)
8631 if (arg == 0)
8632 return build_int_cst (integer_type_node, no_type_class);
8634 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8637 /* Fold a call to __builtin_strlen with argument ARG. */
8639 static tree
8640 fold_builtin_strlen (location_t loc, tree type, tree arg)
8642 if (!validate_arg (arg, POINTER_TYPE))
8643 return NULL_TREE;
8644 else
8646 c_strlen_data lendata = { };
8647 tree len = c_strlen (arg, 0, &lendata);
8649 if (len)
8650 return fold_convert_loc (loc, type, len);
8652 if (!lendata.decl)
8653 c_strlen (arg, 1, &lendata);
8655 if (lendata.decl)
8657 if (EXPR_HAS_LOCATION (arg))
8658 loc = EXPR_LOCATION (arg);
8659 else if (loc == UNKNOWN_LOCATION)
8660 loc = input_location;
8661 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8664 return NULL_TREE;
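/* Worked example: fold_builtin_strlen turns strlen ("abc") into the
   constant 3; for an unterminated constant array (a hypothetical
   char a[2] = "xy"), the second c_strlen call recovers the
   declaration so warn_string_no_nul can point at it.  */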
8668 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8670 static tree
8671 fold_builtin_inf (location_t loc, tree type, int warn)
8673 REAL_VALUE_TYPE real;
8675 /* __builtin_inff is intended to be usable to define INFINITY on all
8676 targets. If an infinity is not available, INFINITY expands "to a
8677 positive constant of type float that overflows at translation
8678 time", footnote "In this case, using INFINITY will violate the
8679 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8680 Thus we pedwarn to ensure this constraint violation is
8681 diagnosed. */
8682 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8683 pedwarn (loc, 0, "target format does not support infinity");
8685 real_inf (&real);
8686 return build_real (type, real);
8689 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8690 NULL_TREE if no simplification can be made. */
8692 static tree
8693 fold_builtin_sincos (location_t loc,
8694 tree arg0, tree arg1, tree arg2)
8696 tree type;
8697 tree fndecl, call = NULL_TREE;
8699 if (!validate_arg (arg0, REAL_TYPE)
8700 || !validate_arg (arg1, POINTER_TYPE)
8701 || !validate_arg (arg2, POINTER_TYPE))
8702 return NULL_TREE;
8704 type = TREE_TYPE (arg0);
8706 /* Calculate the result when the argument is a constant. */
8707 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8708 if (fn == END_BUILTINS)
8709 return NULL_TREE;
8711 /* Canonicalize sincos to cexpi. */
8712 if (TREE_CODE (arg0) == REAL_CST)
8714 tree complex_type = build_complex_type (type);
8715 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8717 if (!call)
8719 if (!targetm.libc_has_function (function_c99_math_complex)
8720 || !builtin_decl_implicit_p (fn))
8721 return NULL_TREE;
8722 fndecl = builtin_decl_explicit (fn);
8723 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8724 call = builtin_save_expr (call);
8727 tree ptype = build_pointer_type (type);
8728 arg1 = fold_convert (ptype, arg1);
8729 arg2 = fold_convert (ptype, arg2);
8730 return build2 (COMPOUND_EXPR, void_type_node,
8731 build2 (MODIFY_EXPR, void_type_node,
8732 build_fold_indirect_ref_loc (loc, arg1),
8733 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8734 build2 (MODIFY_EXPR, void_type_node,
8735 build_fold_indirect_ref_loc (loc, arg2),
8736 fold_build1_loc (loc, REALPART_EXPR, type, call)));
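/* Illustrative result of the canonicalization above: conceptually,

     sincos (x, &s, &c);

   becomes

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   where cexpi (x) computes cos (x) + i*sin (x).  */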
8739 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8740 Return NULL_TREE if no simplification can be made. */
8742 static tree
8743 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8745 if (!validate_arg (arg1, POINTER_TYPE)
8746 || !validate_arg (arg2, POINTER_TYPE)
8747 || !validate_arg (len, INTEGER_TYPE))
8748 return NULL_TREE;
8750 /* If the LEN parameter is zero, return zero. */
8751 if (integer_zerop (len))
8752 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8753 arg1, arg2);
8755 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8756 if (operand_equal_p (arg1, arg2, 0))
8757 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8759 /* If the len parameter is one, return an expression corresponding to
8760 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8761 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8763 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8764 tree cst_uchar_ptr_node
8765 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8767 tree ind1
8768 = fold_convert_loc (loc, integer_type_node,
8769 build1 (INDIRECT_REF, cst_uchar_node,
8770 fold_convert_loc (loc,
8771 cst_uchar_ptr_node,
8772 arg1)));
8773 tree ind2
8774 = fold_convert_loc (loc, integer_type_node,
8775 build1 (INDIRECT_REF, cst_uchar_node,
8776 fold_convert_loc (loc,
8777 cst_uchar_ptr_node,
8778 arg2)));
8779 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8782 return NULL_TREE;
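/* Worked examples: memcmp (p, q, 0) folds to 0 while still evaluating
   P and Q for their side effects, and memcmp (p, q, 1) becomes the
   single-byte difference built above.  */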
8785 /* Fold a call to builtin isascii with argument ARG. */
8787 static tree
8788 fold_builtin_isascii (location_t loc, tree arg)
8790 if (!validate_arg (arg, INTEGER_TYPE))
8791 return NULL_TREE;
8792 else
8794 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8795 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8796 build_int_cst (integer_type_node,
8797 ~ (unsigned HOST_WIDE_INT) 0x7f));
8798 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8799 arg, integer_zero_node);
8803 /* Fold a call to builtin toascii with argument ARG. */
8805 static tree
8806 fold_builtin_toascii (location_t loc, tree arg)
8808 if (!validate_arg (arg, INTEGER_TYPE))
8809 return NULL_TREE;
8811 /* Transform toascii(c) -> (c & 0x7f). */
8812 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8813 build_int_cst (integer_type_node, 0x7f));
8816 /* Fold a call to builtin isdigit with argument ARG. */
8818 static tree
8819 fold_builtin_isdigit (location_t loc, tree arg)
8821 if (!validate_arg (arg, INTEGER_TYPE))
8822 return NULL_TREE;
8823 else
8825 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8826 /* According to the C standard, isdigit is unaffected by locale.
8827 However, it definitely is affected by the target character set. */
8828 unsigned HOST_WIDE_INT target_digit0
8829 = lang_hooks.to_target_charset ('0');
8831 if (target_digit0 == 0)
8832 return NULL_TREE;
8834 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8835 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8836 build_int_cst (unsigned_type_node, target_digit0));
8837 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8838 build_int_cst (unsigned_type_node, 9));
8842 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8844 static tree
8845 fold_builtin_fabs (location_t loc, tree arg, tree type)
8847 if (!validate_arg (arg, REAL_TYPE))
8848 return NULL_TREE;
8850 arg = fold_convert_loc (loc, type, arg);
8851 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8854 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8856 static tree
8857 fold_builtin_abs (location_t loc, tree arg, tree type)
8859 if (!validate_arg (arg, INTEGER_TYPE))
8860 return NULL_TREE;
8862 arg = fold_convert_loc (loc, type, arg);
8863 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8866 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8868 static tree
8869 fold_builtin_carg (location_t loc, tree arg, tree type)
8871 if (validate_arg (arg, COMPLEX_TYPE)
8872 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8874 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8876 if (atan2_fn)
8878 tree new_arg = builtin_save_expr (arg);
8879 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8880 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8881 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8885 return NULL_TREE;
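/* Illustrative sketch (not part of GCC): the rewrite performed by the
   carg fold above, spelled with GCC builtins so that no <math.h> or
   <complex.h> declarations are needed.  */
static double
carg_sketch (_Complex double z)
{
  /* carg (z) -> atan2 (cimag (z), creal (z))  */
  return __builtin_atan2 (__builtin_cimag (z), __builtin_creal (z));
}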
8888 /* Fold a call to builtin frexp; we can assume the base is 2.  */
8890 static tree
8891 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8893 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8894 return NULL_TREE;
8896 STRIP_NOPS (arg0);
8898 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8899 return NULL_TREE;
8901 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8903 /* Proceed if a valid pointer type was passed in. */
8904 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8906 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8907 tree frac, exp;
8909 switch (value->cl)
8911 case rvc_zero:
8912 /* For +-0, return (*exp = 0, +-0). */
8913 exp = integer_zero_node;
8914 frac = arg0;
8915 break;
8916 case rvc_nan:
8917 case rvc_inf:
8918 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8919 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8920 case rvc_normal:
8922 /* Since the frexp function always expects base 2, and in
8923 GCC normalized significands are already in the range
8924 [0.5, 1.0), we have exactly what frexp wants. */
8925 REAL_VALUE_TYPE frac_rvt = *value;
8926 SET_REAL_EXP (&frac_rvt, 0);
8927 frac = build_real (rettype, frac_rvt);
8928 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8930 break;
8931 default:
8932 gcc_unreachable ();
8935       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8936 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8937 TREE_SIDE_EFFECTS (arg1) = 1;
8938 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8941 return NULL_TREE;
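/* Illustrative sketch (not part of GCC): with a constant argument the
   fold above collapses the call into a compound expression; e.g.
   frexp (8.0, &e) becomes (e = 4, 0.5), since 8.0 == 0.5 * 2**4.  */
static double
frexp_sketch (int *e)
{
  return (*e = 4, 0.5);	/* Folded form of frexp (8.0, e).  */
}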
8944 /* Fold a call to builtin modf. */
8946 static tree
8947 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8949 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8950 return NULL_TREE;
8952 STRIP_NOPS (arg0);
8954 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8955 return NULL_TREE;
8957 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8959 /* Proceed if a valid pointer type was passed in. */
8960 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8962 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8963 REAL_VALUE_TYPE trunc, frac;
8965 switch (value->cl)
8967 case rvc_nan:
8968 case rvc_zero:
8969 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8970 trunc = frac = *value;
8971 break;
8972 case rvc_inf:
8973 /* For +-Inf, return (*arg1 = arg0, +-0). */
8974 frac = dconst0;
8975 frac.sign = value->sign;
8976 trunc = *value;
8977 break;
8978 case rvc_normal:
8979 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8980 real_trunc (&trunc, VOIDmode, value);
8981 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8982 /* If the original number was negative and already
8983 integral, then the fractional part is -0.0. */
8984 if (value->sign && frac.cl == rvc_zero)
8985 frac.sign = value->sign;
8986 break;
8989 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8990 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8991 build_real (rettype, trunc));
8992 TREE_SIDE_EFFECTS (arg1) = 1;
8993 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8994 build_real (rettype, frac));
8997 return NULL_TREE;
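/* Illustrative sketch (not part of GCC): with a constant argument the
   fold above collapses the call into a compound expression; e.g.
   modf (-2.5, &ip) becomes (ip = -2.0, -0.5).  */
static double
modf_sketch (double *ip)
{
  return (*ip = -2.0, -0.5);	/* Folded form of modf (-2.5, ip).  */
}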
9000 /* Given a location LOC, an interclass builtin function decl FNDECL
9001    and its single argument ARG, return a folded expression computing
9002    the same, or NULL_TREE if we either couldn't or didn't want to fold
9003    (the latter happens if there's an RTL instruction available).  */
9005 static tree
9006 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9008 machine_mode mode;
9010 if (!validate_arg (arg, REAL_TYPE))
9011 return NULL_TREE;
9013 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9014 return NULL_TREE;
9016 mode = TYPE_MODE (TREE_TYPE (arg));
9018 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9020 /* If there is no optab, try generic code. */
9021 switch (DECL_FUNCTION_CODE (fndecl))
9023 tree result;
9025 CASE_FLT_FN (BUILT_IN_ISINF):
9027 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9028 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9029 tree type = TREE_TYPE (arg);
9030 REAL_VALUE_TYPE r;
9031 char buf[128];
9033 if (is_ibm_extended)
9035 /* NaN and Inf are encoded in the high-order double value
9036 only. The low-order value is not significant. */
9037 type = double_type_node;
9038 mode = DFmode;
9039 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9041 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9042 real_from_string (&r, buf);
9043 result = build_call_expr (isgr_fn, 2,
9044 fold_build1_loc (loc, ABS_EXPR, type, arg),
9045 build_real (type, r));
9046 return result;
9048 CASE_FLT_FN (BUILT_IN_FINITE):
9049 case BUILT_IN_ISFINITE:
9051 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9052 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9053 tree type = TREE_TYPE (arg);
9054 REAL_VALUE_TYPE r;
9055 char buf[128];
9057 if (is_ibm_extended)
9059 /* NaN and Inf are encoded in the high-order double value
9060 only. The low-order value is not significant. */
9061 type = double_type_node;
9062 mode = DFmode;
9063 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9065 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9066 real_from_string (&r, buf);
9067 result = build_call_expr (isle_fn, 2,
9068 fold_build1_loc (loc, ABS_EXPR, type, arg),
9069 build_real (type, r));
9070 /*result = fold_build2_loc (loc, UNGT_EXPR,
9071 TREE_TYPE (TREE_TYPE (fndecl)),
9072 fold_build1_loc (loc, ABS_EXPR, type, arg),
9073 build_real (type, r));
9074 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9075 TREE_TYPE (TREE_TYPE (fndecl)),
9076 result);*/
9077 return result;
9079 case BUILT_IN_ISNORMAL:
9081 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9082 islessequal(fabs(x),DBL_MAX). */
9083 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9084 tree type = TREE_TYPE (arg);
9085 tree orig_arg, max_exp, min_exp;
9086 machine_mode orig_mode = mode;
9087 REAL_VALUE_TYPE rmax, rmin;
9088 char buf[128];
9090 orig_arg = arg = builtin_save_expr (arg);
9091 if (is_ibm_extended)
9093 /* Use double to test the normal range of IBM extended
9094 precision. Emin for IBM extended precision is
9095 different to emin for IEEE double, being 53 higher
9096 since the low double exponent is at least 53 lower
9097 than the high double exponent. */
9098 type = double_type_node;
9099 mode = DFmode;
9100 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9102 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9104 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9105 real_from_string (&rmax, buf);
9106 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9107 real_from_string (&rmin, buf);
9108 max_exp = build_real (type, rmax);
9109 min_exp = build_real (type, rmin);
9111 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9112 if (is_ibm_extended)
9114 /* Testing the high end of the range is done just using
9115 the high double, using the same test as isfinite().
9116 For the subnormal end of the range we first test the
9117 high double, then if its magnitude is equal to the
9118 limit of 0x1p-969, we test whether the low double is
9119 non-zero and opposite sign to the high double. */
9120 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9121 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9122 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9123 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9124 arg, min_exp);
9125 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9126 complex_double_type_node, orig_arg);
9127 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9128 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9129 tree zero = build_real (type, dconst0);
9130 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9131 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9132 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9133 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9134 fold_build3 (COND_EXPR,
9135 integer_type_node,
9136 hilt, logt, lolt));
9137 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9138 eq_min, ok_lo);
9139 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9140 gt_min, eq_min);
9142 else
9144 tree const isge_fn
9145 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9146 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9148 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9149 max_exp, min_exp);
9150 return result;
9152 default:
9153 break;
9156 return NULL_TREE;
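/* Illustrative sketch (not part of GCC): the generic isinf expansion
   above at the C level, with the literal standing in for DBL_MAX, the
   mode's largest finite value.  */
static int
isinf_sketch (double x)
{
  /* isinf (x) -> isgreater (fabs (x), DBL_MAX)  */
  return __builtin_isgreater (__builtin_fabs (x),
			      1.7976931348623157e+308);
}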
9159 /* Fold a call to __builtin_isnan (), __builtin_isinf () or __builtin_finite ().
9160 ARG is the argument for the call. */
9162 static tree
9163 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9165 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9167 if (!validate_arg (arg, REAL_TYPE))
9168 return NULL_TREE;
9170 switch (builtin_index)
9172 case BUILT_IN_ISINF:
9173 if (!HONOR_INFINITIES (arg))
9174 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9176 return NULL_TREE;
9178 case BUILT_IN_ISINF_SIGN:
9180 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9181 /* In a boolean context, GCC will fold the inner COND_EXPR to
9182 1. So e.g. "if (isinf_sign(x))" would be folded to just
9183 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9184 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9185 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9186 tree tmp = NULL_TREE;
9188 arg = builtin_save_expr (arg);
9190 if (signbit_fn && isinf_fn)
9192 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9193 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9195 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9196 signbit_call, integer_zero_node);
9197 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9198 isinf_call, integer_zero_node);
9200 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9201 integer_minus_one_node, integer_one_node);
9202 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9203 isinf_call, tmp,
9204 integer_zero_node);
9207 return tmp;
9210 case BUILT_IN_ISFINITE:
9211 if (!HONOR_NANS (arg)
9212 && !HONOR_INFINITIES (arg))
9213 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9215 return NULL_TREE;
9217 case BUILT_IN_ISNAN:
9218 if (!HONOR_NANS (arg))
9219 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9222 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9223 if (is_ibm_extended)
9225 /* NaN and Inf are encoded in the high-order double value
9226 only. The low-order value is not significant. */
9227 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9230 arg = builtin_save_expr (arg);
9231 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9233 default:
9234 gcc_unreachable ();
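/* Illustrative sketch (not part of GCC): when NaNs are honored, the
   isnan fold above reduces to an unordered self-comparison, which is
   true only for NaN.  */
static int
isnan_sketch (double x)
{
  /* isnan (x) -> x UNORDERED x  */
  return __builtin_isunordered (x, x);
}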
9238 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9239 This builtin will generate code to return the appropriate floating
9240 point classification depending on the value of the floating point
9241 number passed in. The possible return values must be supplied as
9242 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9243    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9244    one floating-point argument, which is "type generic".  */
9246 static tree
9247 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9249 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9250 arg, type, res, tmp;
9251 machine_mode mode;
9252 REAL_VALUE_TYPE r;
9253 char buf[128];
9255 /* Verify the required arguments in the original call. */
9256 if (nargs != 6
9257 || !validate_arg (args[0], INTEGER_TYPE)
9258 || !validate_arg (args[1], INTEGER_TYPE)
9259 || !validate_arg (args[2], INTEGER_TYPE)
9260 || !validate_arg (args[3], INTEGER_TYPE)
9261 || !validate_arg (args[4], INTEGER_TYPE)
9262 || !validate_arg (args[5], REAL_TYPE))
9263 return NULL_TREE;
9265 fp_nan = args[0];
9266 fp_infinite = args[1];
9267 fp_normal = args[2];
9268 fp_subnormal = args[3];
9269 fp_zero = args[4];
9270 arg = args[5];
9271 type = TREE_TYPE (arg);
9272 mode = TYPE_MODE (type);
9273 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9275 /* fpclassify(x) ->
9276 isnan(x) ? FP_NAN :
9277 (fabs(x) == Inf ? FP_INFINITE :
9278 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9279 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9281 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9282 build_real (type, dconst0));
9283 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9284 tmp, fp_zero, fp_subnormal);
9286 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9287 real_from_string (&r, buf);
9288 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9289 arg, build_real (type, r));
9290 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9292 if (HONOR_INFINITIES (mode))
9294 real_inf (&r);
9295 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9296 build_real (type, r));
9297 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9298 fp_infinite, res);
9301 if (HONOR_NANS (mode))
9303 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9304 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9307 return res;
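/* Illustrative sketch (not part of GCC): the decision chain built
   above, written out in C with caller-supplied class values; the
   literal stands in for DBL_MIN (0x1p-1022).  */
static int
fpclassify_sketch (double x, int fp_nan, int fp_infinite,
		   int fp_normal, int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  if (__builtin_isunordered (x, x))
    return fp_nan;
  if (ax == __builtin_inf ())
    return fp_infinite;
  if (ax >= 2.2250738585072014e-308)
    return fp_normal;
  return ax == 0.0 ? fp_zero : fp_subnormal;
}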
9310 /* Fold a call to an unordered comparison function such as
9311 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9312 being called and ARG0 and ARG1 are the arguments for the call.
9313 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9314 the opposite of the desired result. UNORDERED_CODE is used
9315 for modes that can hold NaNs and ORDERED_CODE is used for
9316 the rest. */
9318 static tree
9319 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9320 enum tree_code unordered_code,
9321 enum tree_code ordered_code)
9323 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9324 enum tree_code code;
9325 tree type0, type1;
9326 enum tree_code code0, code1;
9327 tree cmp_type = NULL_TREE;
9329 type0 = TREE_TYPE (arg0);
9330 type1 = TREE_TYPE (arg1);
9332 code0 = TREE_CODE (type0);
9333 code1 = TREE_CODE (type1);
9335 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9336 /* Choose the wider of two real types. */
9337 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9338 ? type0 : type1;
9339 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9340 cmp_type = type0;
9341 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9342 cmp_type = type1;
9344 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9345 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9347 if (unordered_code == UNORDERED_EXPR)
9349 if (!HONOR_NANS (arg0))
9350 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9351 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9354 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9355 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9356 fold_build2_loc (loc, code, type, arg0, arg1));
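/* Illustrative sketch (not part of GCC): the inversion used above;
   e.g. isgreater (x, y) is folded to the negation of the unordered-
   or-less-equal comparison, so quiet NaNs raise no exception.  */
static int
isgreater_sketch (double x, double y)
{
  /* isgreater (x, y) -> !(x UNLE y)  */
  return !(__builtin_isunordered (x, y) || x <= y);
}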
9359 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9360    arithmetic if it can never overflow, or into internal functions that
9361    return both the result of the arithmetic and a boolean overflow flag in
9362    a complex integer result, or some other check for overflow.
9363 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9364 checking part of that. */
9366 static tree
9367 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9368 tree arg0, tree arg1, tree arg2)
9370 enum internal_fn ifn = IFN_LAST;
9371 /* The code of the expression corresponding to the built-in. */
9372 enum tree_code opcode = ERROR_MARK;
9373 bool ovf_only = false;
9375 switch (fcode)
9377 case BUILT_IN_ADD_OVERFLOW_P:
9378 ovf_only = true;
9379 /* FALLTHRU */
9380 case BUILT_IN_ADD_OVERFLOW:
9381 case BUILT_IN_SADD_OVERFLOW:
9382 case BUILT_IN_SADDL_OVERFLOW:
9383 case BUILT_IN_SADDLL_OVERFLOW:
9384 case BUILT_IN_UADD_OVERFLOW:
9385 case BUILT_IN_UADDL_OVERFLOW:
9386 case BUILT_IN_UADDLL_OVERFLOW:
9387 opcode = PLUS_EXPR;
9388 ifn = IFN_ADD_OVERFLOW;
9389 break;
9390 case BUILT_IN_SUB_OVERFLOW_P:
9391 ovf_only = true;
9392 /* FALLTHRU */
9393 case BUILT_IN_SUB_OVERFLOW:
9394 case BUILT_IN_SSUB_OVERFLOW:
9395 case BUILT_IN_SSUBL_OVERFLOW:
9396 case BUILT_IN_SSUBLL_OVERFLOW:
9397 case BUILT_IN_USUB_OVERFLOW:
9398 case BUILT_IN_USUBL_OVERFLOW:
9399 case BUILT_IN_USUBLL_OVERFLOW:
9400 opcode = MINUS_EXPR;
9401 ifn = IFN_SUB_OVERFLOW;
9402 break;
9403 case BUILT_IN_MUL_OVERFLOW_P:
9404 ovf_only = true;
9405 /* FALLTHRU */
9406 case BUILT_IN_MUL_OVERFLOW:
9407 case BUILT_IN_SMUL_OVERFLOW:
9408 case BUILT_IN_SMULL_OVERFLOW:
9409 case BUILT_IN_SMULLL_OVERFLOW:
9410 case BUILT_IN_UMUL_OVERFLOW:
9411 case BUILT_IN_UMULL_OVERFLOW:
9412 case BUILT_IN_UMULLL_OVERFLOW:
9413 opcode = MULT_EXPR;
9414 ifn = IFN_MUL_OVERFLOW;
9415 break;
9416 default:
9417 gcc_unreachable ();
9420 /* For the "generic" overloads, the first two arguments can have different
9421 types and the last argument determines the target type to use to check
9422 for overflow. The arguments of the other overloads all have the same
9423 type. */
9424 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9426 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9427 arguments are constant, attempt to fold the built-in call into a constant
9428 expression indicating whether or not it detected an overflow. */
9429 if (ovf_only
9430 && TREE_CODE (arg0) == INTEGER_CST
9431 && TREE_CODE (arg1) == INTEGER_CST)
9432 /* Perform the computation in the target type and check for overflow. */
9433 return omit_one_operand_loc (loc, boolean_type_node,
9434 arith_overflowed_p (opcode, type, arg0, arg1)
9435 ? boolean_true_node : boolean_false_node,
9436 arg2);
9438 tree intres, ovfres;
9439 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9441 intres = fold_binary_loc (loc, opcode, type,
9442 fold_convert_loc (loc, type, arg0),
9443 fold_convert_loc (loc, type, arg1));
9444 if (TREE_OVERFLOW (intres))
9445 intres = drop_tree_overflow (intres);
9446 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9447 ? boolean_true_node : boolean_false_node);
9449 else
9451 tree ctype = build_complex_type (type);
9452 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9453 arg0, arg1);
9454 tree tgt = save_expr (call);
9455 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9456 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9457 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9460 if (ovf_only)
9461 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9463 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9464 tree store
9465 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9466 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
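/* Illustrative sketch (not part of GCC): with constant operands the
   _p variants above fold to a constant; __INT_MAX__ is the GCC
   predefined macro for INT_MAX.  */
static _Bool
add_overflow_p_sketch (void)
{
  int t = 0;
  /* Folds to true: INT_MAX + 1 overflows int.  */
  return __builtin_add_overflow_p (__INT_MAX__, 1, t);
}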
9469 /* Fold a call to __builtin_FILE to a constant string. */
9471 static inline tree
9472 fold_builtin_FILE (location_t loc)
9474 if (const char *fname = LOCATION_FILE (loc))
9476 /* The documentation says this builtin is equivalent to the preprocessor
9477 __FILE__ macro so it appears appropriate to use the same file prefix
9478 mappings. */
9479 fname = remap_macro_filename (fname);
9480 return build_string_literal (strlen (fname) + 1, fname);
9483 return build_string_literal (1, "");
9486 /* Fold a call to __builtin_FUNCTION to a constant string. */
9488 static inline tree
9489 fold_builtin_FUNCTION ()
9491 const char *name = "";
9493 if (current_function_decl)
9494 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9496 return build_string_literal (strlen (name) + 1, name);
9499 /* Fold a call to __builtin_LINE to an integer constant. */
9501 static inline tree
9502 fold_builtin_LINE (location_t loc, tree type)
9504 return build_int_cst (type, LOCATION_LINE (loc));
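/* Illustrative sketch (not part of GCC): each of these folds yields a
   constant describing the expansion point, e.g.  */
static const char *
function_sketch (void)
{
  /* Folds to the string literal "function_sketch".  */
  return __builtin_FUNCTION ();
}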
9507 /* Fold a call to built-in function FNDECL with 0 arguments.
9508 This function returns NULL_TREE if no simplification was possible. */
9510 static tree
9511 fold_builtin_0 (location_t loc, tree fndecl)
9513 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9514 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9515 switch (fcode)
9517 case BUILT_IN_FILE:
9518 return fold_builtin_FILE (loc);
9520 case BUILT_IN_FUNCTION:
9521 return fold_builtin_FUNCTION ();
9523 case BUILT_IN_LINE:
9524 return fold_builtin_LINE (loc, type);
9526 CASE_FLT_FN (BUILT_IN_INF):
9527 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9528 case BUILT_IN_INFD32:
9529 case BUILT_IN_INFD64:
9530 case BUILT_IN_INFD128:
9531 return fold_builtin_inf (loc, type, true);
9533 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9534 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9535 return fold_builtin_inf (loc, type, false);
9537 case BUILT_IN_CLASSIFY_TYPE:
9538 return fold_builtin_classify_type (NULL_TREE);
9540 default:
9541 break;
9543 return NULL_TREE;
9546 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9547 This function returns NULL_TREE if no simplification was possible. */
9549 static tree
9550 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9552 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9553 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9555 if (TREE_CODE (arg0) == ERROR_MARK)
9556 return NULL_TREE;
9558 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9559 return ret;
9561 switch (fcode)
9563 case BUILT_IN_CONSTANT_P:
9565 tree val = fold_builtin_constant_p (arg0);
9567 /* Gimplification will pull the CALL_EXPR for the builtin out of
9568 an if condition. When not optimizing, we'll not CSE it back.
9569 	 To avoid regressions of the link-error kind, return false now.  */
9570 if (!val && !optimize)
9571 val = integer_zero_node;
9573 return val;
9576 case BUILT_IN_CLASSIFY_TYPE:
9577 return fold_builtin_classify_type (arg0);
9579 case BUILT_IN_STRLEN:
9580 return fold_builtin_strlen (loc, type, arg0);
9582 CASE_FLT_FN (BUILT_IN_FABS):
9583 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9584 case BUILT_IN_FABSD32:
9585 case BUILT_IN_FABSD64:
9586 case BUILT_IN_FABSD128:
9587 return fold_builtin_fabs (loc, arg0, type);
9589 case BUILT_IN_ABS:
9590 case BUILT_IN_LABS:
9591 case BUILT_IN_LLABS:
9592 case BUILT_IN_IMAXABS:
9593 return fold_builtin_abs (loc, arg0, type);
9595 CASE_FLT_FN (BUILT_IN_CONJ):
9596 if (validate_arg (arg0, COMPLEX_TYPE)
9597 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9598 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9599 break;
9601 CASE_FLT_FN (BUILT_IN_CREAL):
9602 if (validate_arg (arg0, COMPLEX_TYPE)
9603 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9604 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9605 break;
9607 CASE_FLT_FN (BUILT_IN_CIMAG):
9608 if (validate_arg (arg0, COMPLEX_TYPE)
9609 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9610 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9611 break;
9613 CASE_FLT_FN (BUILT_IN_CARG):
9614 return fold_builtin_carg (loc, arg0, type);
9616 case BUILT_IN_ISASCII:
9617 return fold_builtin_isascii (loc, arg0);
9619 case BUILT_IN_TOASCII:
9620 return fold_builtin_toascii (loc, arg0);
9622 case BUILT_IN_ISDIGIT:
9623 return fold_builtin_isdigit (loc, arg0);
9625 CASE_FLT_FN (BUILT_IN_FINITE):
9626 case BUILT_IN_FINITED32:
9627 case BUILT_IN_FINITED64:
9628 case BUILT_IN_FINITED128:
9629 case BUILT_IN_ISFINITE:
9631 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9632 if (ret)
9633 return ret;
9634 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9637 CASE_FLT_FN (BUILT_IN_ISINF):
9638 case BUILT_IN_ISINFD32:
9639 case BUILT_IN_ISINFD64:
9640 case BUILT_IN_ISINFD128:
9642 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9643 if (ret)
9644 return ret;
9645 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9648 case BUILT_IN_ISNORMAL:
9649 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9651 case BUILT_IN_ISINF_SIGN:
9652 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9654 CASE_FLT_FN (BUILT_IN_ISNAN):
9655 case BUILT_IN_ISNAND32:
9656 case BUILT_IN_ISNAND64:
9657 case BUILT_IN_ISNAND128:
9658 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9660 case BUILT_IN_FREE:
9661 if (integer_zerop (arg0))
9662 return build_empty_stmt (loc);
9663 break;
9665 default:
9666 break;
9669 return NULL_TREE;
9673 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9674 This function returns NULL_TREE if no simplification was possible. */
9676 static tree
9677 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9680 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9682 if (TREE_CODE (arg0) == ERROR_MARK
9683 || TREE_CODE (arg1) == ERROR_MARK)
9684 return NULL_TREE;
9686 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9687 return ret;
9689 switch (fcode)
9691 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9692 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9693 if (validate_arg (arg0, REAL_TYPE)
9694 && validate_arg (arg1, POINTER_TYPE))
9695 return do_mpfr_lgamma_r (arg0, arg1, type);
9696 break;
9698 CASE_FLT_FN (BUILT_IN_FREXP):
9699 return fold_builtin_frexp (loc, arg0, arg1, type);
9701 CASE_FLT_FN (BUILT_IN_MODF):
9702 return fold_builtin_modf (loc, arg0, arg1, type);
9704 case BUILT_IN_STRSPN:
9705 return fold_builtin_strspn (loc, arg0, arg1);
9707 case BUILT_IN_STRCSPN:
9708 return fold_builtin_strcspn (loc, arg0, arg1);
9710 case BUILT_IN_STRPBRK:
9711 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9713 case BUILT_IN_EXPECT:
9714 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9716 case BUILT_IN_ISGREATER:
9717 return fold_builtin_unordered_cmp (loc, fndecl,
9718 arg0, arg1, UNLE_EXPR, LE_EXPR);
9719 case BUILT_IN_ISGREATEREQUAL:
9720 return fold_builtin_unordered_cmp (loc, fndecl,
9721 arg0, arg1, UNLT_EXPR, LT_EXPR);
9722 case BUILT_IN_ISLESS:
9723 return fold_builtin_unordered_cmp (loc, fndecl,
9724 arg0, arg1, UNGE_EXPR, GE_EXPR);
9725 case BUILT_IN_ISLESSEQUAL:
9726 return fold_builtin_unordered_cmp (loc, fndecl,
9727 arg0, arg1, UNGT_EXPR, GT_EXPR);
9728 case BUILT_IN_ISLESSGREATER:
9729 return fold_builtin_unordered_cmp (loc, fndecl,
9730 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9731 case BUILT_IN_ISUNORDERED:
9732 return fold_builtin_unordered_cmp (loc, fndecl,
9733 arg0, arg1, UNORDERED_EXPR,
9734 NOP_EXPR);
9736 /* We do the folding for va_start in the expander. */
9737 case BUILT_IN_VA_START:
9738 break;
9740 case BUILT_IN_OBJECT_SIZE:
9741 return fold_builtin_object_size (arg0, arg1);
9743 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9744 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9746 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9747 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9749 default:
9750 break;
9752 return NULL_TREE;
9755 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9756 and ARG2.
9757 This function returns NULL_TREE if no simplification was possible. */
9759 static tree
9760 fold_builtin_3 (location_t loc, tree fndecl,
9761 tree arg0, tree arg1, tree arg2)
9763 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9764 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9766 if (TREE_CODE (arg0) == ERROR_MARK
9767 || TREE_CODE (arg1) == ERROR_MARK
9768 || TREE_CODE (arg2) == ERROR_MARK)
9769 return NULL_TREE;
9771 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9772 arg0, arg1, arg2))
9773 return ret;
9775 switch (fcode)
9778 CASE_FLT_FN (BUILT_IN_SINCOS):
9779 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9781 CASE_FLT_FN (BUILT_IN_REMQUO):
9782 if (validate_arg (arg0, REAL_TYPE)
9783 && validate_arg (arg1, REAL_TYPE)
9784 && validate_arg (arg2, POINTER_TYPE))
9785 return do_mpfr_remquo (arg0, arg1, arg2);
9786 break;
9788 case BUILT_IN_MEMCMP:
9789 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9791 case BUILT_IN_EXPECT:
9792 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9794 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9795 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9797 case BUILT_IN_ADD_OVERFLOW:
9798 case BUILT_IN_SUB_OVERFLOW:
9799 case BUILT_IN_MUL_OVERFLOW:
9800 case BUILT_IN_ADD_OVERFLOW_P:
9801 case BUILT_IN_SUB_OVERFLOW_P:
9802 case BUILT_IN_MUL_OVERFLOW_P:
9803 case BUILT_IN_SADD_OVERFLOW:
9804 case BUILT_IN_SADDL_OVERFLOW:
9805 case BUILT_IN_SADDLL_OVERFLOW:
9806 case BUILT_IN_SSUB_OVERFLOW:
9807 case BUILT_IN_SSUBL_OVERFLOW:
9808 case BUILT_IN_SSUBLL_OVERFLOW:
9809 case BUILT_IN_SMUL_OVERFLOW:
9810 case BUILT_IN_SMULL_OVERFLOW:
9811 case BUILT_IN_SMULLL_OVERFLOW:
9812 case BUILT_IN_UADD_OVERFLOW:
9813 case BUILT_IN_UADDL_OVERFLOW:
9814 case BUILT_IN_UADDLL_OVERFLOW:
9815 case BUILT_IN_USUB_OVERFLOW:
9816 case BUILT_IN_USUBL_OVERFLOW:
9817 case BUILT_IN_USUBLL_OVERFLOW:
9818 case BUILT_IN_UMUL_OVERFLOW:
9819 case BUILT_IN_UMULL_OVERFLOW:
9820 case BUILT_IN_UMULLL_OVERFLOW:
9821 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9823 default:
9824 break;
9826 return NULL_TREE;
9829 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
9830    arguments.  The final boolean parameter, IGNORE, is true if the result
9831    of the function call is ignored; it is currently unused here.  This
9832    function returns NULL_TREE if no simplification was possible.  */
9834 tree
9835 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9837 tree ret = NULL_TREE;
9839 switch (nargs)
9841 case 0:
9842 ret = fold_builtin_0 (loc, fndecl);
9843 break;
9844 case 1:
9845 ret = fold_builtin_1 (loc, fndecl, args[0]);
9846 break;
9847 case 2:
9848 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9849 break;
9850 case 3:
9851 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9852 break;
9853 default:
9854 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9855 break;
9857 if (ret)
9859 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9860 SET_EXPR_LOCATION (ret, loc);
9861 return ret;
9863 return NULL_TREE;
9866 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9867 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9868 of arguments in ARGS to be omitted. OLDNARGS is the number of
9869 elements in ARGS. */
9871 static tree
9872 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9873 int skip, tree fndecl, int n, va_list newargs)
9875 int nargs = oldnargs - skip + n;
9876 tree *buffer;
9878 if (n > 0)
9880 int i, j;
9882 buffer = XALLOCAVEC (tree, nargs);
9883 for (i = 0; i < n; i++)
9884 buffer[i] = va_arg (newargs, tree);
9885 for (j = skip; j < oldnargs; j++, i++)
9886 buffer[i] = args[j];
9888 else
9889 buffer = args + skip;
9891 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9894 /* Return true if FNDECL shouldn't be folded right now.
9895 If a built-in function has an inline attribute always_inline
9896 wrapper, defer folding it after always_inline functions have
9897 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9898 might not be performed. */
9900 bool
9901 avoid_folding_inline_builtin (tree fndecl)
9903 return (DECL_DECLARED_INLINE_P (fndecl)
9904 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9905 && cfun
9906 && !cfun->always_inline_functions_inlined
9907 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9910 /* A wrapper function for builtin folding that prevents warnings for
9911 "statement without effect" and the like, caused by removing the
9912 call node earlier than the warning is generated. */
9914 tree
9915 fold_call_expr (location_t loc, tree exp, bool ignore)
9917 tree ret = NULL_TREE;
9918 tree fndecl = get_callee_fndecl (exp);
9919 if (fndecl && fndecl_built_in_p (fndecl)
9920 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9921 yet. Defer folding until we see all the arguments
9922 (after inlining). */
9923 && !CALL_EXPR_VA_ARG_PACK (exp))
9925 int nargs = call_expr_nargs (exp);
9927 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9928 instead last argument is __builtin_va_arg_pack (). Defer folding
9929 even in that case, until arguments are finalized. */
9930 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9932 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9933 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9934 return NULL_TREE;
9937 if (avoid_folding_inline_builtin (fndecl))
9938 return NULL_TREE;
9940 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9941 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9942 CALL_EXPR_ARGP (exp), ignore);
9943 else
9945 tree *args = CALL_EXPR_ARGP (exp);
9946 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9947 if (ret)
9948 return ret;
9951 return NULL_TREE;
9954 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9955 N arguments are passed in the array ARGARRAY. Return a folded
9956 expression or NULL_TREE if no simplification was possible. */
9958 tree
9959 fold_builtin_call_array (location_t loc, tree,
9960 tree fn,
9961 int n,
9962 tree *argarray)
9964 if (TREE_CODE (fn) != ADDR_EXPR)
9965 return NULL_TREE;
9967 tree fndecl = TREE_OPERAND (fn, 0);
9968 if (TREE_CODE (fndecl) == FUNCTION_DECL
9969 && fndecl_built_in_p (fndecl))
9971 /* If last argument is __builtin_va_arg_pack (), arguments to this
9972 function are not finalized yet. Defer folding until they are. */
9973 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9975 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9976 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9977 return NULL_TREE;
9979 if (avoid_folding_inline_builtin (fndecl))
9980 return NULL_TREE;
9981 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9982 return targetm.fold_builtin (fndecl, n, argarray, false);
9983 else
9984 return fold_builtin_n (loc, fndecl, argarray, n, false);
9987 return NULL_TREE;
9990 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9991 along with N new arguments specified as the "..." parameters. SKIP
9992 is the number of arguments in EXP to be omitted. This function is used
9993 to do varargs-to-varargs transformations. */
9995 static tree
9996 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9998 va_list ap;
9999 tree t;
10001 va_start (ap, n);
10002 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10003 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10004 va_end (ap);
10006 return t;
10009 /* Validate a single argument ARG against a tree code CODE representing
10010 a type. Return true when argument is valid. */
10012 static bool
10013 validate_arg (const_tree arg, enum tree_code code)
10015 if (!arg)
10016 return false;
10017 else if (code == POINTER_TYPE)
10018 return POINTER_TYPE_P (TREE_TYPE (arg));
10019 else if (code == INTEGER_TYPE)
10020 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10021 return code == TREE_CODE (TREE_TYPE (arg));
10024 /* This function validates the types of a function call argument list
10025 against a specified list of tree_codes. If the last specifier is a 0,
10026    that represents an ellipsis; otherwise the last specifier must be a
10027 VOID_TYPE.
10029 This is the GIMPLE version of validate_arglist. Eventually we want to
10030 completely convert builtins.c to work from GIMPLEs and the tree based
10031 validate_arglist will then be removed. */
10033 bool
10034 validate_gimple_arglist (const gcall *call, ...)
10036 enum tree_code code;
10037 bool res = 0;
10038 va_list ap;
10039 const_tree arg;
10040 size_t i;
10042 va_start (ap, call);
10043 i = 0;
10045   do
10047       code = (enum tree_code) va_arg (ap, int);
10048 switch (code)
10050 case 0:
10051 	  /* This signifies an ellipsis; any further arguments are all OK.  */
10052 res = true;
10053 goto end;
10054 case VOID_TYPE:
10055 /* This signifies an endlink, if no arguments remain, return
10056 true, otherwise return false. */
10057 res = (i == gimple_call_num_args (call));
10058 goto end;
10059 default:
10060 /* If no parameters remain or the parameter's code does not
10061 match the specified code, return false. Otherwise continue
10062 checking any remaining arguments. */
10063 arg = gimple_call_arg (call, i++);
10064 if (!validate_arg (arg, code))
10065 goto end;
10066 break;
10069 while (1);
10071 /* We need gotos here since we can only have one VA_CLOSE in a
10072 function. */
10073 end: ;
10074 va_end (ap);
10076 return res;
10079 /* Default target-specific builtin expander that does nothing. */
10081 rtx
10082 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10083 rtx target ATTRIBUTE_UNUSED,
10084 rtx subtarget ATTRIBUTE_UNUSED,
10085 machine_mode mode ATTRIBUTE_UNUSED,
10086 int ignore ATTRIBUTE_UNUSED)
10088 return NULL_RTX;
10091 /* Returns true if EXP represents data that would potentially reside
10092 in a readonly section. */
10094 bool
10095 readonly_data_expr (tree exp)
10097 STRIP_NOPS (exp);
10099 if (TREE_CODE (exp) != ADDR_EXPR)
10100 return false;
10102 exp = get_base_address (TREE_OPERAND (exp, 0));
10103 if (!exp)
10104 return false;
10106 /* Make sure we call decl_readonly_section only for trees it
10107 can handle (since it returns true for everything it doesn't
10108 understand). */
10109 if (TREE_CODE (exp) == STRING_CST
10110 || TREE_CODE (exp) == CONSTRUCTOR
10111 || (VAR_P (exp) && TREE_STATIC (exp)))
10112 return decl_readonly_section (exp, 0);
10113 else
10114 return false;
10117 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10118 to the call, and TYPE is its return type.
10120 Return NULL_TREE if no simplification was possible, otherwise return the
10121 simplified form of the call as a tree.
10123 The simplified form may be a constant or other expression which
10124 computes the same value, but in a more efficient manner (including
10125 calls to other builtin functions).
10127 The call may contain arguments which need to be evaluated, but
10128 which are not useful to determine the result of the call. In
10129 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10130 COMPOUND_EXPR will be an argument which must be evaluated.
10131 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10132 COMPOUND_EXPR in the chain will contain the tree for the simplified
10133 form of the builtin function call. */
10135 static tree
10136 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10138 if (!validate_arg (s1, POINTER_TYPE)
10139 || !validate_arg (s2, POINTER_TYPE))
10140 return NULL_TREE;
10141 else
10143 tree fn;
10144 const char *p1, *p2;
10146 p2 = c_getstr (s2);
10147 if (p2 == NULL)
10148 return NULL_TREE;
10150 p1 = c_getstr (s1);
10151 if (p1 != NULL)
10153 const char *r = strpbrk (p1, p2);
10154 tree tem;
10156 if (r == NULL)
10157 return build_int_cst (TREE_TYPE (s1), 0);
10159 /* Return an offset into the constant string argument. */
10160 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10161 return fold_convert_loc (loc, type, tem);
10164 if (p2[0] == '\0')
10165 /* strpbrk(x, "") == NULL.
10166 Evaluate and ignore s1 in case it had side-effects. */
10167 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10169 if (p2[1] != '\0')
10170 return NULL_TREE; /* Really call strpbrk. */
10172 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10173 if (!fn)
10174 return NULL_TREE;
10176 /* New argument list transforming strpbrk(s1, s2) to
10177 strchr(s1, s2[0]). */
10178 return build_call_expr_loc (loc, fn, 2, s1,
10179 build_int_cst (integer_type_node, p2[0]));
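/* Illustrative sketch (not part of GCC): the single-character case
   handled above; the reject string "a" is a made-up example.  */
static char *
strpbrk_sketch (char *s)
{
  /* strpbrk (s, "a") -> strchr (s, 'a')  */
  return __builtin_strchr (s, 'a');
}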
10183 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10184 to the call.
10186 Return NULL_TREE if no simplification was possible, otherwise return the
10187 simplified form of the call as a tree.
10189 The simplified form may be a constant or other expression which
10190 computes the same value, but in a more efficient manner (including
10191 calls to other builtin functions).
10193 The call may contain arguments which need to be evaluated, but
10194 which are not useful to determine the result of the call. In
10195 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10196 COMPOUND_EXPR will be an argument which must be evaluated.
10197 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10198 COMPOUND_EXPR in the chain will contain the tree for the simplified
10199 form of the builtin function call. */
10201 static tree
10202 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10204 if (!validate_arg (s1, POINTER_TYPE)
10205 || !validate_arg (s2, POINTER_TYPE))
10206 return NULL_TREE;
10207 else
10209 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10211     /* If either argument is "", the result is 0.  */
10212 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10213 /* Evaluate and ignore both arguments in case either one has
10214 side-effects. */
10215 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10216 s1, s2);
10217 return NULL_TREE;
10221 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10222 to the call.
10224 Return NULL_TREE if no simplification was possible, otherwise return the
10225 simplified form of the call as a tree.
10227 The simplified form may be a constant or other expression which
10228 computes the same value, but in a more efficient manner (including
10229 calls to other builtin functions).
10231 The call may contain arguments which need to be evaluated, but
10232 which are not useful to determine the result of the call. In
10233 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10234 COMPOUND_EXPR will be an argument which must be evaluated.
10235 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10236 COMPOUND_EXPR in the chain will contain the tree for the simplified
10237 form of the builtin function call. */
10239 static tree
10240 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10242 if (!validate_arg (s1, POINTER_TYPE)
10243 || !validate_arg (s2, POINTER_TYPE))
10244 return NULL_TREE;
10245 else
10247     /* If the first argument is "", the result is 0.  */
10248 const char *p1 = c_getstr (s1);
10249 if (p1 && *p1 == '\0')
10251 /* Evaluate and ignore argument s2 in case it has
10252 side-effects. */
10253 return omit_one_operand_loc (loc, size_type_node,
10254 size_zero_node, s2);
10257 /* If the second argument is "", return __builtin_strlen(s1). */
10258 const char *p2 = c_getstr (s2);
10259 if (p2 && *p2 == '\0')
10261 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10263 /* If the replacement _DECL isn't initialized, don't do the
10264 transformation. */
10265 if (!fn)
10266 return NULL_TREE;
10268 return build_call_expr_loc (loc, fn, 1, s1);
10270 return NULL_TREE;
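/* Illustrative sketch (not part of GCC): the empty-reject-set case
   handled above.  */
static __SIZE_TYPE__
strcspn_sketch (const char *s)
{
  /* strcspn (s, "") -> strlen (s)  */
  return __builtin_strlen (s);
}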
10274 /* Fold the next_arg or va_start call EXP.  Returns true if an error
10275    was produced, false otherwise.  This is done so that we don't output
10276    the error or warning two or three times.  */
10278 bool
10279 fold_builtin_next_arg (tree exp, bool va_start_p)
10281 tree fntype = TREE_TYPE (current_function_decl);
10282 int nargs = call_expr_nargs (exp);
10283 tree arg;
10284   /* There is a good chance the current input_location points inside the
10285      definition of the va_start macro (perhaps on the token for
10286      the builtin) in a system header, so warnings will not be emitted.
10287 Use the location in real source code. */
10288 location_t current_location =
10289 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10290 NULL);
10292 if (!stdarg_p (fntype))
10294 error ("%<va_start%> used in function with fixed arguments");
10295 return true;
10298 if (va_start_p)
10300 if (va_start_p && (nargs != 2))
10302 error ("wrong number of arguments to function %<va_start%>");
10303 return true;
10305 arg = CALL_EXPR_ARG (exp, 1);
10307   /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
10308 when we checked the arguments and if needed issued a warning. */
10309 else
10311 if (nargs == 0)
10313 /* Evidently an out of date version of <stdarg.h>; can't validate
10314 va_start's second argument, but can still work as intended. */
10315 warning_at (current_location,
10316 OPT_Wvarargs,
10317 "%<__builtin_next_arg%> called without an argument");
10318 return true;
10320 else if (nargs > 1)
10322 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10323 return true;
10325 arg = CALL_EXPR_ARG (exp, 0);
10328 if (TREE_CODE (arg) == SSA_NAME)
10329 arg = SSA_NAME_VAR (arg);
10331 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10332 or __builtin_next_arg (0) the first time we see it, after checking
10333 the arguments and if needed issuing a warning. */
10334 if (!integer_zerop (arg))
10336 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10338 /* Strip off all nops for the sake of the comparison. This
10339 is not quite the same as STRIP_NOPS. It does more.
10340 We must also strip off INDIRECT_EXPR for C++ reference
10341 parameters. */
10342 while (CONVERT_EXPR_P (arg)
10343 || TREE_CODE (arg) == INDIRECT_REF)
10344 arg = TREE_OPERAND (arg, 0);
10345 if (arg != last_parm)
10347 	  /* FIXME: Sometimes with the tree optimizers we can get something
10348 	     that is not the last argument even though the user used the
10349 	     last argument.  We just warn and leave the argument as it is,
10350 	     so wrong code may still be generated because of it.  */
10352 warning_at (current_location,
10353 OPT_Wvarargs,
10354 "second parameter of %<va_start%> not last named argument");
10357 /* Undefined by C99 7.15.1.4p4 (va_start):
10358 "If the parameter parmN is declared with the register storage
10359 class, with a function or array type, or with a type that is
10360 not compatible with the type that results after application of
10361 the default argument promotions, the behavior is undefined."
10363 else if (DECL_REGISTER (arg))
10365 warning_at (current_location,
10366 OPT_Wvarargs,
10367 "undefined behavior when second parameter of "
10368 "%<va_start%> is declared with %<register%> storage");
10371 /* We want to verify the second parameter just once before the tree
10372 optimizers are run and then avoid keeping it in the tree,
10373 as otherwise we could warn even for correct code like:
10374 void foo (int i, ...)
10375 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10376 if (va_start_p)
10377 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10378 else
10379 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10381 return false;
10385 /* Expand a call EXP to __builtin_object_size. */
10387 static rtx
10388 expand_builtin_object_size (tree exp)
10390 tree ost;
10391 int object_size_type;
10392 tree fndecl = get_callee_fndecl (exp);
10394 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10396 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10397 exp, fndecl);
10398 expand_builtin_trap ();
10399 return const0_rtx;
10402 ost = CALL_EXPR_ARG (exp, 1);
10403 STRIP_NOPS (ost);
10405 if (TREE_CODE (ost) != INTEGER_CST
10406 || tree_int_cst_sgn (ost) < 0
10407 || compare_tree_int (ost, 3) > 0)
10409 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10410 exp, fndecl);
10411 expand_builtin_trap ();
10412 return const0_rtx;
10415 object_size_type = tree_to_shwi (ost);
10417 return object_size_type < 2 ? constm1_rtx : const0_rtx;
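/* Illustrative sketch (not part of GCC): when nothing is known about
   the pointer, the expansion above yields the documented failure
   values.  */
static __SIZE_TYPE__
bos_sketch (void *p)
{
  /* Unknown object: types 0 and 1 give (size_t) -1, types 2 and 3
     give 0.  */
  return __builtin_object_size (p, 0);
}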
10420 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10421 FCODE is the BUILT_IN_* to use.
10422 Return NULL_RTX if we failed; the caller should emit a normal call,
10423 otherwise try to get the result in TARGET, if convenient (and in
10424 mode MODE if that's convenient). */
10426 static rtx
10427 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10428 enum built_in_function fcode)
10430 if (!validate_arglist (exp,
10431 POINTER_TYPE,
10432 fcode == BUILT_IN_MEMSET_CHK
10433 ? INTEGER_TYPE : POINTER_TYPE,
10434 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10435 return NULL_RTX;
10437 tree dest = CALL_EXPR_ARG (exp, 0);
10438 tree src = CALL_EXPR_ARG (exp, 1);
10439 tree len = CALL_EXPR_ARG (exp, 2);
10440 tree size = CALL_EXPR_ARG (exp, 3);
10442 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10443 /*str=*/NULL_TREE, size);
10445 if (!tree_fits_uhwi_p (size))
10446 return NULL_RTX;
10448 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10450 /* Avoid transforming the checking call to an ordinary one when
10451 an overflow has been detected or when the call couldn't be
10452 validated because the size is not constant. */
10453 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10454 return NULL_RTX;
10456 tree fn = NULL_TREE;
10457 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10458 mem{cpy,pcpy,move,set} is available. */
10459 switch (fcode)
10461 case BUILT_IN_MEMCPY_CHK:
10462 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10463 break;
10464 case BUILT_IN_MEMPCPY_CHK:
10465 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10466 break;
10467 case BUILT_IN_MEMMOVE_CHK:
10468 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10469 break;
10470 case BUILT_IN_MEMSET_CHK:
10471 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10472 break;
10473 default:
10474 break;
10477 if (! fn)
10478 return NULL_RTX;
10480 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10481 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10482 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10483 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10485 else if (fcode == BUILT_IN_MEMSET_CHK)
10486 return NULL_RTX;
10487 else
10489 unsigned int dest_align = get_pointer_alignment (dest);
10491 /* If DEST is not a pointer type, call the normal function. */
10492 if (dest_align == 0)
10493 return NULL_RTX;
10495 /* If SRC and DEST are the same (and not volatile), do nothing. */
10496 if (operand_equal_p (src, dest, 0))
10498 tree expr;
10500 if (fcode != BUILT_IN_MEMPCPY_CHK)
10502 /* Evaluate and ignore LEN in case it has side-effects. */
10503 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10504 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10507 expr = fold_build_pointer_plus (dest, len);
10508 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10511 /* __memmove_chk special case. */
10512 if (fcode == BUILT_IN_MEMMOVE_CHK)
10514 unsigned int src_align = get_pointer_alignment (src);
10516 if (src_align == 0)
10517 return NULL_RTX;
10519 /* If src is categorized for a readonly section we can use
10520 normal __memcpy_chk. */
10521 if (readonly_data_expr (src))
10523 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10524 if (!fn)
10525 return NULL_RTX;
10526 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10527 dest, src, len, size);
10528 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10529 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10530 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10533 return NULL_RTX;
10537 /* Emit warning if a buffer overflow is detected at compile time. */
10539 static void
10540 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10542 /* The source string. */
10543 tree srcstr = NULL_TREE;
10544 /* The size of the destination object. */
10545 tree objsize = NULL_TREE;
10546 /* The string that is being concatenated with (as in __strcat_chk)
10547 or null if it isn't. */
10548 tree catstr = NULL_TREE;
10549 /* The maximum length of the source sequence in a bounded operation
10550 (such as __strncat_chk) or null if the operation isn't bounded
10551 (such as __strcat_chk). */
10552 tree maxread = NULL_TREE;
10553 /* The exact size of the access (such as in __strncpy_chk). */
10554 tree size = NULL_TREE;
10556 switch (fcode)
10558 case BUILT_IN_STRCPY_CHK:
10559 case BUILT_IN_STPCPY_CHK:
10560 srcstr = CALL_EXPR_ARG (exp, 1);
10561 objsize = CALL_EXPR_ARG (exp, 2);
10562 break;
10564 case BUILT_IN_STRCAT_CHK:
10565 /* For __strcat_chk the warning will be emitted only if overflowing
10566 by at least strlen (dest) + 1 bytes. */
10567 catstr = CALL_EXPR_ARG (exp, 0);
10568 srcstr = CALL_EXPR_ARG (exp, 1);
10569 objsize = CALL_EXPR_ARG (exp, 2);
10570 break;
10572 case BUILT_IN_STRNCAT_CHK:
10573 catstr = CALL_EXPR_ARG (exp, 0);
10574 srcstr = CALL_EXPR_ARG (exp, 1);
10575 maxread = CALL_EXPR_ARG (exp, 2);
10576 objsize = CALL_EXPR_ARG (exp, 3);
10577 break;
10579 case BUILT_IN_STRNCPY_CHK:
10580 case BUILT_IN_STPNCPY_CHK:
10581 srcstr = CALL_EXPR_ARG (exp, 1);
10582 size = CALL_EXPR_ARG (exp, 2);
10583 objsize = CALL_EXPR_ARG (exp, 3);
10584 break;
10586 case BUILT_IN_SNPRINTF_CHK:
10587 case BUILT_IN_VSNPRINTF_CHK:
10588 maxread = CALL_EXPR_ARG (exp, 1);
10589 objsize = CALL_EXPR_ARG (exp, 3);
10590 break;
10591 default:
10592 gcc_unreachable ();
10595 if (catstr && maxread)
10597 /* Check __strncat_chk. There is no way to determine the length
10598 of the string to which the source string is being appended so
10599 just warn when the length of the source string is not known. */
10600 check_strncat_sizes (exp, objsize);
10601 return;
10604 /* The destination argument is the first one for all built-ins above. */
10605 tree dst = CALL_EXPR_ARG (exp, 0);
10607 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10610 /* Emit warning if a buffer overflow is detected at compile time
10611 in __sprintf_chk/__vsprintf_chk calls. */
10613 static void
10614 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10616 tree size, len, fmt;
10617 const char *fmt_str;
10618 int nargs = call_expr_nargs (exp);
10620 /* Verify the required arguments in the original call. */
10622 if (nargs < 4)
10623 return;
10624 size = CALL_EXPR_ARG (exp, 2);
10625 fmt = CALL_EXPR_ARG (exp, 3);
10627 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10628 return;
10630 /* Check whether the format is a literal string constant. */
10631 fmt_str = c_getstr (fmt);
10632 if (fmt_str == NULL)
10633 return;
10635 if (!init_target_chars ())
10636 return;
10638 /* If the format doesn't contain % args or %%, we know its size. */
10639 if (strchr (fmt_str, target_percent) == 0)
10640 len = build_int_cstu (size_type_node, strlen (fmt_str));
10641   /* If the format is "%s" and the first ... argument is a string literal,
10642 we know it too. */
10643 else if (fcode == BUILT_IN_SPRINTF_CHK
10644 && strcmp (fmt_str, target_percent_s) == 0)
10646 tree arg;
10648 if (nargs < 5)
10649 return;
10650 arg = CALL_EXPR_ARG (exp, 4);
10651 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10652 return;
10654 len = c_strlen (arg, 1);
10655 if (!len || ! tree_fits_uhwi_p (len))
10656 return;
10658 else
10659 return;
10661 /* Add one for the terminating nul. */
10662 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10664 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10665 /*maxread=*/NULL_TREE, len, size);
10668 /* Emit a warning if free is called with the address of a variable.  */
10670 static void
10671 maybe_emit_free_warning (tree exp)
10673 if (call_expr_nargs (exp) != 1)
10674 return;
10676 tree arg = CALL_EXPR_ARG (exp, 0);
10678 STRIP_NOPS (arg);
10679 if (TREE_CODE (arg) != ADDR_EXPR)
10680 return;
10682 arg = get_base_address (TREE_OPERAND (arg, 0));
10683 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10684 return;
10686 if (SSA_VAR_P (arg))
10687 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10688 "%Kattempt to free a non-heap object %qD", exp, arg);
10689 else
10690 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10691 "%Kattempt to free a non-heap object", exp);

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side effects in its arguments;
     if there are any side effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determine it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
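
/* Illustration (not part of the original source): what the folding
   above produces for simple cases:

       char buf[64];
       size_t a = __builtin_object_size (buf, 0);        folds to 64
       size_t b = __builtin_object_size (buf + 16, 0);   folds to 48
       size_t c = __builtin_object_size (buf, 2);        folds to 64

   Once no more information can be discovered about the pointer, types
   0 and 1 yield (size_t) -1 and types 2 and 3 yield 0.  */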

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of its NARGS arguments.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   *ARG_QUO and return the remainder.  The type is taken from the type
   of ARG0 and is used for setting the precision of the calculation
   and of the results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
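
/* Illustration (not part of the original source): with both arguments
   constant, the folding above evaluates remquo at compile time.  For

       int q;
       double r = __builtin_remquo (5.0, 3.0, &q);

   the nearest-integer quotient of 5/3 is 2, so the call folds to a
   COMPOUND_EXPR that stores 2 in q and yields the remainder -1.0.  */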

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
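
/* Illustration (not part of the original source): with a constant
   argument, the folding above evaluates lgamma_r at compile time.  For

       int sg;
       double r = __builtin_lgamma_r (0.5, &sg);

   Gamma (0.5) is sqrt (pi), which is positive, so the call folds to a
   COMPOUND_EXPR that stores 1 in sg and yields log (sqrt (pi)),
   approximately 0.5724.  */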

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
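
/* Illustration (not part of the original source): do_mpc_arg2 backs the
   constant folding of two-argument complex builtins, e.g. cpow when
   mpc_pow is passed as FUNC.  A call with constant operands such as

       _Complex double z = __builtin_cpow (1.0i, 2.0 + 0.0i);

   can then fold to the constant -1.0 + 0.0i, computed by MPC at the
   precision of the type and converted back through do_mpc_ckconv.  */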

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
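
/* Illustration (not part of the original source): a typical use of
   target_char_cst_p in a folder, sketched with hypothetical variables
   ARG1 and ARG2.  Folding memchr/strchr needs the searched-for
   character as a host char before it can inspect constant string
   contents:

       char c;
       if (target_char_cst_p (arg2, &c))
         {
           const char *p = c_getstr (arg1);
           if (p)
             ... search for C in P at compile time ...
         }
*/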

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee it is not (so the
   list of handled builtins below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;
      default:
        break;
      }
  return false;
}