1 /* Expand builtin functions.
2 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "gomp-constants.h"
75 #include "omp-general.h"
76 #include "tree-dfa.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
96 {
97 #include "builtins.def"
98 };
100 /* Setup an array of builtin_info_type, make sure each element decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_int_roundingfn (tree, rtx);
127 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 static rtx expand_builtin_next_arg (void);
129 static rtx expand_builtin_va_start (tree);
130 static rtx expand_builtin_va_end (tree);
131 static rtx expand_builtin_va_copy (tree);
132 static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 static rtx expand_builtin_strcmp (tree, rtx);
134 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 static rtx expand_builtin_memcpy (tree, rtx);
136 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 rtx target, tree exp,
138 memop_ret retmode,
139 bool might_overlap);
140 static rtx expand_builtin_memmove (tree, rtx);
141 static rtx expand_builtin_mempcpy (tree, rtx);
142 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 static rtx expand_builtin_strcpy (tree, rtx);
144 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 static rtx expand_builtin_strncpy (tree, rtx);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
186 unsigned HOST_WIDE_INT target_newline;
187 unsigned HOST_WIDE_INT target_percent;
188 static unsigned HOST_WIDE_INT target_c;
189 static unsigned HOST_WIDE_INT target_s;
190 char target_percent_c[3];
191 char target_percent_s[3];
192 char target_percent_s_newline[4];
193 static tree do_mpfr_remquo (tree, tree, tree);
194 static tree do_mpfr_lgamma_r (tree, tree, tree);
195 static void expand_builtin_sync_synchronize (void);
197 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
199 static bool
200 is_builtin_name (const char *name)
202 return (startswith (name, "__builtin_")
203 || startswith (name, "__sync_")
204 || startswith (name, "__atomic_"));
207 /* Return true if NODE should be considered for inline expansion regardless
208 of the optimization level. This means whenever a function is invoked with
209 its "internal" name, which normally contains the prefix "__builtin". */
211 bool
212 called_as_built_in (tree node)
214 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
215 we want the name used to call the function, not the name it
216 will have. */
217 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
218 return is_builtin_name (name);
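/* Illustration (derived from the checks above): is_builtin_name matches
   only the reserved prefixes, so

     is_builtin_name ("__builtin_memcpy")  is true
     is_builtin_name ("__atomic_load_n")   is true
     is_builtin_name ("memcpy")            is false

   and called_as_built_in therefore returns true only when the call site
   spells out the internal "__builtin_"-style name.  */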
221 /* Compute values M and N such that M divides (address of EXP - N) and such
222 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
223 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
224 *ALIGNP and any bit-offset to *BITPOSP.
226 Note that the address (and thus the alignment) computed here is based
227 on the address to which a symbol resolves, whereas DECL_ALIGN is based
228 on the address at which an object is actually located. These two
229 addresses are not always the same. For example, on ARM targets,
230 the address &foo of a Thumb function foo() has the lowest bit set,
231 whereas foo() itself starts on an even address.
233 If ADDR_P is true we are taking the address of the memory reference EXP
234 and thus cannot rely on the access taking place. */
236 bool
237 get_object_alignment_2 (tree exp, unsigned int *alignp,
238 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
240 poly_int64 bitsize, bitpos;
241 tree offset;
242 machine_mode mode;
243 int unsignedp, reversep, volatilep;
244 unsigned int align = BITS_PER_UNIT;
245 bool known_alignment = false;
247 /* Get the innermost object and the constant (bitpos) and possibly
248 variable (offset) offset of the access. */
249 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
250 &unsignedp, &reversep, &volatilep);
252 /* Extract alignment information from the innermost object and
253 possibly adjust bitpos and offset. */
254 if (TREE_CODE (exp) == FUNCTION_DECL)
256 /* Function addresses can encode extra information besides their
257 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
258 allows the low bit to be used as a virtual bit, we know
259 that the address itself must be at least 2-byte aligned. */
260 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
261 align = 2 * BITS_PER_UNIT;
263 else if (TREE_CODE (exp) == LABEL_DECL)
265 else if (TREE_CODE (exp) == CONST_DECL)
267 /* The alignment of a CONST_DECL is determined by its initializer. */
268 exp = DECL_INITIAL (exp);
269 align = TYPE_ALIGN (TREE_TYPE (exp));
270 if (CONSTANT_CLASS_P (exp))
271 align = targetm.constant_alignment (exp, align);
273 known_alignment = true;
275 else if (DECL_P (exp))
277 align = DECL_ALIGN (exp);
278 known_alignment = true;
280 else if (TREE_CODE (exp) == INDIRECT_REF
281 || TREE_CODE (exp) == MEM_REF
282 || TREE_CODE (exp) == TARGET_MEM_REF)
284 tree addr = TREE_OPERAND (exp, 0);
285 unsigned ptr_align;
286 unsigned HOST_WIDE_INT ptr_bitpos;
287 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
289 /* If the address is explicitly aligned, handle that. */
290 if (TREE_CODE (addr) == BIT_AND_EXPR
291 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
293 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
294 ptr_bitmask *= BITS_PER_UNIT;
295 align = least_bit_hwi (ptr_bitmask);
296 addr = TREE_OPERAND (addr, 0);
299 known_alignment
300 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
301 align = MAX (ptr_align, align);
303 /* Re-apply explicit alignment to the bitpos. */
304 ptr_bitpos &= ptr_bitmask;
306 /* The alignment of the pointer operand in a TARGET_MEM_REF
307 has to take the variable offset parts into account. */
308 if (TREE_CODE (exp) == TARGET_MEM_REF)
310 if (TMR_INDEX (exp))
312 unsigned HOST_WIDE_INT step = 1;
313 if (TMR_STEP (exp))
314 step = TREE_INT_CST_LOW (TMR_STEP (exp));
315 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
317 if (TMR_INDEX2 (exp))
318 align = BITS_PER_UNIT;
319 known_alignment = false;
322 /* When EXP is an actual memory reference then we can use
323 TYPE_ALIGN of a pointer indirection to derive alignment.
324 Do so only if get_pointer_alignment_1 did not reveal absolute
325 alignment knowledge and if using that alignment would
326 improve the situation. */
327 unsigned int talign;
328 if (!addr_p && !known_alignment
329 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
330 && talign > align)
331 align = talign;
332 else
334 /* Else adjust bitpos accordingly. */
335 bitpos += ptr_bitpos;
336 if (TREE_CODE (exp) == MEM_REF
337 || TREE_CODE (exp) == TARGET_MEM_REF)
338 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 else if (TREE_CODE (exp) == STRING_CST)
343 /* STRING_CSTs are the only constant objects we allow not to be
344 wrapped inside a CONST_DECL. */
345 align = TYPE_ALIGN (TREE_TYPE (exp));
346 if (CONSTANT_CLASS_P (exp))
347 align = targetm.constant_alignment (exp, align);
349 known_alignment = true;
352 /* If there is a non-constant offset part extract the maximum
353 alignment that can prevail. */
354 if (offset)
356 unsigned int trailing_zeros = tree_ctz (offset);
357 if (trailing_zeros < HOST_BITS_PER_INT)
359 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
360 if (inner)
361 align = MIN (align, inner);
365 /* Account for the alignment of runtime coefficients, so that the constant
366 bitpos is guaranteed to be accurate. */
367 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
368 if (alt_align != 0 && alt_align < align)
370 align = alt_align;
371 known_alignment = false;
374 *alignp = align;
375 *bitposp = bitpos.coeffs[0] & (align - 1);
376 return known_alignment;
379 /* For a memory reference expression EXP compute values M and N such that M
380 divides (&EXP - N) and such that N < M. If these numbers can be determined,
381 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
382 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
384 bool
385 get_object_alignment_1 (tree exp, unsigned int *alignp,
386 unsigned HOST_WIDE_INT *bitposp)
388 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
389 with it. */
390 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
391 exp = TREE_OPERAND (exp, 0);
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 /* Return the alignment in bits of EXP, an object. */
397 unsigned int
398 get_object_alignment (tree exp)
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
403 get_object_alignment_1 (exp, &align, &bitpos);
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
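/* A worked example: if get_object_alignment_1 determines ALIGN == 64 and
   BITPOS == 16 for some reference (its address is 2 bytes past an 8-byte
   boundary), the only alignment that can be guaranteed for the object
   itself is least_bit_hwi (16) == 16 bits, i.e. 2 bytes, and that is what
   get_object_alignment returns.  */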
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
418 If EXP is not a pointer, false is returned too. */
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
424 STRIP_NOPS (exp);
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
468 else
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
475 else if (TREE_CODE (exp) == INTEGER_CST)
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
495 unsigned int
496 get_pointer_alignment (tree exp)
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
501 get_pointer_alignment_1 (exp, &align, &bitpos);
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
509 return align;
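/* For example (assuming BITS_PER_UNIT == 8): for the POINTER_PLUS_EXPR
   p + 6 where p is known to be 8-byte aligned, the recursive call yields
   ALIGN == 64 and BITPOS == 0 for p, the constant offset raises BITPOS
   to 48, and get_pointer_alignment reduces the pair to
   least_bit_hwi (48) == 16 bits, i.e. the sum is only known to be
   2-byte aligned.  */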
512 /* Return the number of leading non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516 unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521 unsigned n;
523 if (eltsize == 1)
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
533 else
535 for (n = 0; n < maxelts; n++)
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
542 return n;
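/* For instance, with ELTSIZE == 1,

     string_length ("hello\0world", 1, 11) == 5

   while for ELTSIZE of 2 or 4 each element is compared against a zero
   wide character, so the count is in wide characters rather than bytes.  */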
545 /* Compute the length of a null-terminated character string or wide
546 character string handling character sizes of 1, 2, and 4 bytes.
547 TREE_STRING_LENGTH is not the right way because it evaluates to
548 the size of the character array in bytes (as opposed to characters)
549 and because it can contain a zero byte in the middle.
551 ONLY_VALUE should be nonzero if the result is not going to be emitted
552 into the instruction stream and zero if it is going to be expanded.
553 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
554 is returned, otherwise NULL, since
555 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
556 evaluate the side-effects.
558 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
559 accesses. Note that this implies the result is not going to be emitted
560 into the instruction stream.
562 Additional information about the string accessed may be recorded
563 in DATA. For example, if ARG references an unterminated string,
564 then the declaration will be stored in the DECL field. If the
565 length of the unterminated string can be determined, it'll be
566 stored in the LEN field. Note this length could well be different
567 than what a C strlen call would return.
569 ELTSIZE is 1 for normal single byte character strings, and 2 or
570 4 for wide character strings. ELTSIZE is by default 1.
572 The value returned is of type `ssizetype'. */
574 tree
575 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
577 /* If we were not passed a DATA pointer, then get one to a local
578 structure. That avoids having to check DATA for NULL before
579 each time we want to use it. */
580 c_strlen_data local_strlen_data = { };
581 if (!data)
582 data = &local_strlen_data;
584 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
586 tree src = STRIP_NOPS (arg);
587 if (TREE_CODE (src) == COND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
590 tree len1, len2;
592 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
593 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
594 if (tree_int_cst_equal (len1, len2))
595 return len1;
598 if (TREE_CODE (src) == COMPOUND_EXPR
599 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
600 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
602 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
604 /* Offset from the beginning of the string in bytes. */
605 tree byteoff;
606 tree memsize;
607 tree decl;
608 src = string_constant (src, &byteoff, &memsize, &decl);
609 if (src == 0)
610 return NULL_TREE;
612 /* Determine the size of the string element. */
613 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
614 return NULL_TREE;
616 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
617 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
618 in case the latter is less than the size of the array, such as when
619 SRC refers to a short string literal used to initialize a large array.
620 In that case, the elements of the array after the terminating NUL are
621 all NUL. */
622 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
623 strelts = strelts / eltsize;
625 if (!tree_fits_uhwi_p (memsize))
626 return NULL_TREE;
628 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
630 /* PTR can point to the byte representation of any string type, including
631 char* and wchar_t*. */
632 const char *ptr = TREE_STRING_POINTER (src);
634 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
636 /* The code below works only for single byte character types. */
637 if (eltsize != 1)
638 return NULL_TREE;
640 /* If the string has an internal NUL character followed by any
641 non-NUL characters (e.g., "foo\0bar"), we can't compute
642 the offset to the following NUL if we don't know where to
643 start searching for it. */
644 unsigned len = string_length (ptr, eltsize, strelts);
646 /* Return when an embedded null character is found or none at all.
647 In the latter case, set the DECL/LEN field in the DATA structure
648 so that callers may examine them. */
649 if (len + 1 < strelts)
650 return NULL_TREE;
651 else if (len >= maxelts)
653 data->decl = decl;
654 data->off = byteoff;
655 data->minlen = ssize_int (len);
656 return NULL_TREE;
659 /* For empty strings the result should be zero. */
660 if (len == 0)
661 return ssize_int (0);
663 /* We don't know the starting offset, but we do know that the string
664 has no internal zero bytes. If the offset falls within the bounds
665 of the string subtract the offset from the length of the string,
666 and return that. Otherwise the length is zero. Take care to
667 use SAVE_EXPR in case the OFFSET has side-effects. */
668 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
669 : byteoff;
670 offsave = fold_convert_loc (loc, sizetype, offsave);
671 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
672 size_int (len));
673 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
674 offsave);
675 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
676 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
677 build_zero_cst (ssizetype));
680 /* Offset from the beginning of the string in elements. */
681 HOST_WIDE_INT eltoff;
683 /* We have a known offset into the string. Start searching there for
684 a null character if we can represent it as a single HOST_WIDE_INT. */
685 if (byteoff == 0)
686 eltoff = 0;
687 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
688 eltoff = -1;
689 else
690 eltoff = tree_to_uhwi (byteoff) / eltsize;
692 /* If the offset is known to be out of bounds, warn, and call strlen at
693 runtime. */
694 if (eltoff < 0 || eltoff >= maxelts)
696 /* Suppress multiple warnings for propagated constant strings. */
697 if (only_value != 2
698 && !warning_suppressed_p (arg, OPT_Warray_bounds)
699 && warning_at (loc, OPT_Warray_bounds,
700 "offset %qwi outside bounds of constant string",
701 eltoff))
703 if (decl)
704 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
705 suppress_warning (arg, OPT_Warray_bounds);
707 return NULL_TREE;
710 /* If eltoff is larger than strelts but less than maxelts the
711 string length is zero, since the excess memory will be zero. */
712 if (eltoff > strelts)
713 return ssize_int (0);
715 /* Use strlen to search for the first zero byte. Since any strings
716 constructed with build_string will have nulls appended, we win even
717 if we get handed something like (char[4])"abcd".
719 Since ELTOFF is our starting index into the string, no further
720 calculation is needed. */
721 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
722 strelts - eltoff);
724 /* Don't know what to return if there was no zero termination.
725 Ideally this would turn into a gcc_checking_assert over time.
726 Set DECL/LEN so callers can examine them. */
727 if (len >= maxelts - eltoff)
729 data->decl = decl;
730 data->off = byteoff;
731 data->minlen = ssize_int (len);
732 return NULL_TREE;
735 return ssize_int (len);
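/* Illustration of the cases handled above: for ARG pointing at the string
   constant "foobar" the result is ssize_int (6); for &"foobar"[2] it is
   ssize_int (4); for "foobar" + i with a variable i it is the COND_EXPR
   i <= 6 ? 6 - i : 0; and for "foo\0bar" + i the result is NULL_TREE,
   because the embedded NUL makes the length depend on where the search
   starts.  */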
738 /* Return a constant integer corresponding to target reading
739 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
740 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
741 are assumed to be zero, otherwise it reads as many characters
742 as needed. */
744 rtx
745 c_readstr (const char *str, scalar_int_mode mode,
746 bool null_terminated_p/*=true*/)
748 HOST_WIDE_INT ch;
749 unsigned int i, j;
750 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
752 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
753 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
754 / HOST_BITS_PER_WIDE_INT;
756 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
757 for (i = 0; i < len; i++)
758 tmp[i] = 0;
760 ch = 1;
761 for (i = 0; i < GET_MODE_SIZE (mode); i++)
763 j = i;
764 if (WORDS_BIG_ENDIAN)
765 j = GET_MODE_SIZE (mode) - i - 1;
766 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
767 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
768 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
769 j *= BITS_PER_UNIT;
771 if (ch || !null_terminated_p)
772 ch = (unsigned char) str[i];
773 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
776 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
777 return immed_wide_int_const (c, mode);
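/* As an example, on a little-endian target c_readstr ("ab", SImode)
   yields the constant 0x00006261: byte 0 of the string lands in the
   least significant byte, and once the terminating NUL is reached the
   remaining bytes are zero because NULL_TERMINATED_P defaults to true.  */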
780 /* Cast a target constant CST to target CHAR and if that value fits into
781 host char type, return zero and put that value into variable pointed to by
782 P. */
784 static int
785 target_char_cast (tree cst, char *p)
787 unsigned HOST_WIDE_INT val, hostval;
789 if (TREE_CODE (cst) != INTEGER_CST
790 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
791 return 1;
793 /* Do not care if it fits or not right here. */
794 val = TREE_INT_CST_LOW (cst);
796 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
797 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
799 hostval = val;
800 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
801 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
803 if (val != hostval)
804 return 1;
806 *p = hostval;
807 return 0;
810 /* Similar to save_expr, but assumes that arbitrary code is not executed
811 in between the multiple evaluations. In particular, we assume that a
812 non-addressable local variable will not be modified. */
814 static tree
815 builtin_save_expr (tree exp)
817 if (TREE_CODE (exp) == SSA_NAME
818 || (TREE_ADDRESSABLE (exp) == 0
819 && (TREE_CODE (exp) == PARM_DECL
820 || (VAR_P (exp) && !TREE_STATIC (exp)))))
821 return exp;
823 return save_expr (exp);
826 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
827 times to get the address of either a higher stack frame, or a return
828 address located within it (depending on FNDECL_CODE). */
830 static rtx
831 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
833 int i;
834 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
835 if (tem == NULL_RTX)
837 /* For a zero count with __builtin_return_address, we don't care what
838 frame address we return, because target-specific definitions will
839 override us. Therefore frame pointer elimination is OK, and using
840 the soft frame pointer is OK.
842 For a nonzero count, or a zero count with __builtin_frame_address,
843 we require a stable offset from the current frame pointer to the
844 previous one, so we must use the hard frame pointer, and
845 we must disable frame pointer elimination. */
846 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
847 tem = frame_pointer_rtx;
848 else
850 tem = hard_frame_pointer_rtx;
852 /* Tell reload not to eliminate the frame pointer. */
853 crtl->accesses_prior_frames = 1;
857 if (count > 0)
858 SETUP_FRAME_ADDRESSES ();
860 /* On the SPARC, the return address is not in the frame, it is in a
861 register. There is no way to access it off of the current frame
862 pointer, but it can be accessed off the previous frame pointer by
863 reading the value from the register window save area. */
864 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
865 count--;
867 /* Scan back COUNT frames to the specified frame. */
868 for (i = 0; i < count; i++)
870 /* Assume the dynamic chain pointer is in the word that the
871 frame address points to, unless otherwise specified. */
872 tem = DYNAMIC_CHAIN_ADDRESS (tem);
873 tem = memory_address (Pmode, tem);
874 tem = gen_frame_mem (Pmode, tem);
875 tem = copy_to_reg (tem);
878 /* For __builtin_frame_address, return what we've got. But, on
879 the SPARC for example, we may have to add a bias. */
880 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
881 return FRAME_ADDR_RTX (tem);
883 /* For __builtin_return_address, get the return address from that frame. */
884 #ifdef RETURN_ADDR_RTX
885 tem = RETURN_ADDR_RTX (count, tem);
886 #else
887 tem = memory_address (Pmode,
888 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
889 tem = gen_frame_mem (Pmode, tem);
890 #endif
891 return tem;
894 /* Alias set used for setjmp buffer. */
895 static alias_set_type setjmp_alias_set = -1;
897 /* Construct the leading half of a __builtin_setjmp call. Control will
898 return to RECEIVER_LABEL. This is also called directly by the SJLJ
899 exception handling code. */
901 void
902 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
904 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
905 rtx stack_save;
906 rtx mem;
908 if (setjmp_alias_set == -1)
909 setjmp_alias_set = new_alias_set ();
911 buf_addr = convert_memory_address (Pmode, buf_addr);
913 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
915 /* We store the frame pointer and the address of receiver_label in
916 the buffer and use the rest of it for the stack save area, which
917 is machine-dependent. */
919 mem = gen_rtx_MEM (Pmode, buf_addr);
920 set_mem_alias_set (mem, setjmp_alias_set);
921 emit_move_insn (mem, hard_frame_pointer_rtx);
923 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
924 GET_MODE_SIZE (Pmode))),
925 set_mem_alias_set (mem, setjmp_alias_set);
927 emit_move_insn (validize_mem (mem),
928 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
930 stack_save = gen_rtx_MEM (sa_mode,
931 plus_constant (Pmode, buf_addr,
932 2 * GET_MODE_SIZE (Pmode)));
933 set_mem_alias_set (stack_save, setjmp_alias_set);
934 emit_stack_save (SAVE_NONLOCAL, &stack_save);
936 /* If there is further processing to do, do it. */
937 if (targetm.have_builtin_setjmp_setup ())
938 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
940 /* We have a nonlocal label. */
941 cfun->has_nonlocal_label = 1;
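/* The resulting __builtin_setjmp buffer layout is thus:

     word 0:          the caller's hard frame pointer
     word 1:          the address of RECEIVER_LABEL
     words 2 and up:  the machine-dependent stack save area

   the same offsets that expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf read back below.  */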
944 /* Construct the trailing part of a __builtin_setjmp call. This is
945 also called directly by the SJLJ exception handling code.
946 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
948 void
949 expand_builtin_setjmp_receiver (rtx receiver_label)
951 rtx chain;
953 /* Mark the FP as used when we get here, so we have to make sure it's
954 marked as used by this function. */
955 emit_use (hard_frame_pointer_rtx);
957 /* Mark the static chain as clobbered here so life information
958 doesn't get messed up for it. */
959 chain = rtx_for_static_chain (current_function_decl, true);
960 if (chain && REG_P (chain))
961 emit_clobber (chain);
963 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
965 /* If the argument pointer can be eliminated in favor of the
966 frame pointer, we don't need to restore it. We assume here
967 that if such an elimination is present, it can always be used.
968 This is the case on all known machines; if we don't make this
969 assumption, we do unnecessary saving on many machines. */
970 size_t i;
971 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
973 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
974 if (elim_regs[i].from == ARG_POINTER_REGNUM
975 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
976 break;
978 if (i == ARRAY_SIZE (elim_regs))
980 /* Now restore our arg pointer from the address at which it
981 was saved in our stack frame. */
982 emit_move_insn (crtl->args.internal_arg_pointer,
983 copy_to_reg (get_arg_pointer_save_area ()));
987 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
988 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
989 else if (targetm.have_nonlocal_goto_receiver ())
990 emit_insn (targetm.gen_nonlocal_goto_receiver ());
991 else
992 { /* Nothing */ }
994 /* We must not allow the code we just generated to be reordered by
995 scheduling. Specifically, the update of the frame pointer must
996 happen immediately, not later. */
997 emit_insn (gen_blockage ());
1000 /* __builtin_longjmp is passed a pointer to an array of five words (not
1001 all will be used on all machines). It operates similarly to the C
1002 library function of the same name, but is more efficient. Much of
1003 the code below is copied from the handling of non-local gotos. */
1005 static void
1006 expand_builtin_longjmp (rtx buf_addr, rtx value)
1008 rtx fp, lab, stack;
1009 rtx_insn *insn, *last;
1010 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1012 /* DRAP is needed for stack realign if longjmp is expanded to current
1013 function */
1014 if (SUPPORTS_STACK_ALIGNMENT)
1015 crtl->need_drap = true;
1017 if (setjmp_alias_set == -1)
1018 setjmp_alias_set = new_alias_set ();
1020 buf_addr = convert_memory_address (Pmode, buf_addr);
1022 buf_addr = force_reg (Pmode, buf_addr);
1024 /* We require that the user must pass a second argument of 1, because
1025 that is what builtin_setjmp will return. */
1026 gcc_assert (value == const1_rtx);
1028 last = get_last_insn ();
1029 if (targetm.have_builtin_longjmp ())
1030 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1031 else
1033 fp = gen_rtx_MEM (Pmode, buf_addr);
1034 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode)));
1037 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1038 2 * GET_MODE_SIZE (Pmode)));
1039 set_mem_alias_set (fp, setjmp_alias_set);
1040 set_mem_alias_set (lab, setjmp_alias_set);
1041 set_mem_alias_set (stack, setjmp_alias_set);
1043 /* Pick up FP, label, and SP from the block and jump. This code is
1044 from expand_goto in stmt.cc; see there for detailed comments. */
1045 if (targetm.have_nonlocal_goto ())
1046 /* We have to pass a value to the nonlocal_goto pattern that will
1047 get copied into the static_chain pointer, but it does not matter
1048 what that value is, because builtin_setjmp does not use it. */
1049 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1050 else
1052 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1053 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1055 lab = copy_to_reg (lab);
1057 /* Restore the frame pointer and stack pointer. We must use a
1058 temporary since the setjmp buffer may be a local. */
1059 fp = copy_to_reg (fp);
1060 emit_stack_restore (SAVE_NONLOCAL, stack);
1062 /* Ensure the frame pointer move is not optimized. */
1063 emit_insn (gen_blockage ());
1064 emit_clobber (hard_frame_pointer_rtx);
1065 emit_clobber (frame_pointer_rtx);
1066 emit_move_insn (hard_frame_pointer_rtx, fp);
1068 emit_use (hard_frame_pointer_rtx);
1069 emit_use (stack_pointer_rtx);
1070 emit_indirect_jump (lab);
1074 /* Search backwards and mark the jump insn as a non-local goto.
1075 Note that this precludes the use of __builtin_longjmp to a
1076 __builtin_setjmp target in the same function. However, we've
1077 already cautioned the user that these functions are for
1078 internal exception handling use only. */
1079 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1081 gcc_assert (insn != last);
1083 if (JUMP_P (insn))
1085 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1086 break;
1088 else if (CALL_P (insn))
1089 break;
1093 static inline bool
1094 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1096 return (iter->i < iter->n);
1099 /* This function validates the types of a function call argument list
1100 against a specified list of tree_codes. If the last specifier is a 0,
1101 that represents an ellipsis, otherwise the last specifier must be a
1102 VOID_TYPE. */
1104 static bool
1105 validate_arglist (const_tree callexpr, ...)
1107 enum tree_code code;
1108 bool res = 0;
1109 va_list ap;
1110 const_call_expr_arg_iterator iter;
1111 const_tree arg;
1113 va_start (ap, callexpr);
1114 init_const_call_expr_arg_iterator (callexpr, &iter);
1116 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1117 tree fn = CALL_EXPR_FN (callexpr);
1118 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1120 for (unsigned argno = 1; ; ++argno)
1122 code = (enum tree_code) va_arg (ap, int);
1124 switch (code)
1126 case 0:
1127 /* This signifies an ellipsis; any further arguments are all ok. */
1128 res = true;
1129 goto end;
1130 case VOID_TYPE:
1131 /* This signifies an endlink; if no arguments remain, return
1132 true, otherwise return false. */
1133 res = !more_const_call_expr_args_p (&iter);
1134 goto end;
1135 case POINTER_TYPE:
1136 /* The actual argument must be nonnull when either the whole
1137 called function has been declared nonnull, or when the formal
1138 argument corresponding to the actual argument has been. */
1139 if (argmap
1140 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1142 arg = next_const_call_expr_arg (&iter);
1143 if (!validate_arg (arg, code) || integer_zerop (arg))
1144 goto end;
1145 break;
1147 /* FALLTHRU */
1148 default:
1149 /* If no parameters remain or the parameter's code does not
1150 match the specified code, return false. Otherwise continue
1151 checking any remaining arguments. */
1152 arg = next_const_call_expr_arg (&iter);
1153 if (!validate_arg (arg, code))
1154 goto end;
1155 break;
1159 /* We need gotos here since we can only have one VA_CLOSE in a
1160 function. */
1161 end: ;
1162 va_end (ap);
1164 BITMAP_FREE (argmap);
1166 return res;
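/* Typical use, as in expand_builtin_nonlocal_goto below:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments (rejecting null constants for
   parameters declared nonnull), whereas a trailing 0 in place of
   VOID_TYPE would allow any number of further arguments.  */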
1169 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1170 and the address of the save area. */
1172 static rtx
1173 expand_builtin_nonlocal_goto (tree exp)
1175 tree t_label, t_save_area;
1176 rtx r_label, r_save_area, r_fp, r_sp;
1177 rtx_insn *insn;
1179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1180 return NULL_RTX;
1182 t_label = CALL_EXPR_ARG (exp, 0);
1183 t_save_area = CALL_EXPR_ARG (exp, 1);
1185 r_label = expand_normal (t_label);
1186 r_label = convert_memory_address (Pmode, r_label);
1187 r_save_area = expand_normal (t_save_area);
1188 r_save_area = convert_memory_address (Pmode, r_save_area);
1189 /* Copy the address of the save location to a register just in case it was
1190 based on the frame pointer. */
1191 r_save_area = copy_to_reg (r_save_area);
1192 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1193 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1194 plus_constant (Pmode, r_save_area,
1195 GET_MODE_SIZE (Pmode)));
1197 crtl->has_nonlocal_goto = 1;
1199 /* ??? We no longer need to pass the static chain value, afaik. */
1200 if (targetm.have_nonlocal_goto ())
1201 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1202 else
1204 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1205 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1207 r_label = copy_to_reg (r_label);
1209 /* Restore the frame pointer and stack pointer. We must use a
1210 temporary since the setjmp buffer may be a local. */
1211 r_fp = copy_to_reg (r_fp);
1212 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1214 /* Ensure the frame pointer move is not optimized. */
1215 emit_insn (gen_blockage ());
1216 emit_clobber (hard_frame_pointer_rtx);
1217 emit_clobber (frame_pointer_rtx);
1218 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1220 /* USE of hard_frame_pointer_rtx added for consistency;
1221 not clear if really needed. */
1222 emit_use (hard_frame_pointer_rtx);
1223 emit_use (stack_pointer_rtx);
1225 /* If the architecture is using a GP register, we must
1226 conservatively assume that the target function makes use of it.
1227 The prologue of functions with nonlocal gotos must therefore
1228 initialize the GP register to the appropriate value, and we
1229 must then make sure that this value is live at the point
1230 of the jump. (Note that this doesn't necessarily apply
1231 to targets with a nonlocal_goto pattern; they are free
1232 to implement it in their own way. Note also that this is
1233 a no-op if the GP register is a global invariant.) */
1234 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1235 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1236 emit_use (pic_offset_table_rtx);
1238 emit_indirect_jump (r_label);
1241 /* Search backwards to the jump insn and mark it as a
1242 non-local goto. */
1243 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1245 if (JUMP_P (insn))
1247 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1248 break;
1250 else if (CALL_P (insn))
1251 break;
1254 return const0_rtx;
1257 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1258 (not all will be used on all machines) that was passed to __builtin_setjmp.
1259 It updates the stack pointer in that block to the current value. This is
1260 also called directly by the SJLJ exception handling code. */
1262 void
1263 expand_builtin_update_setjmp_buf (rtx buf_addr)
1265 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1266 buf_addr = convert_memory_address (Pmode, buf_addr);
1267 rtx stack_save
1268 = gen_rtx_MEM (sa_mode,
1269 memory_address
1270 (sa_mode,
1271 plus_constant (Pmode, buf_addr,
1272 2 * GET_MODE_SIZE (Pmode))));
1274 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1277 /* Expand a call to __builtin_prefetch. For a target that does not support
1278 data prefetch, evaluate the memory address argument in case it has side
1279 effects. */
1281 static void
1282 expand_builtin_prefetch (tree exp)
1284 tree arg0, arg1, arg2;
1285 int nargs;
1286 rtx op0, op1, op2;
1288 if (!validate_arglist (exp, POINTER_TYPE, 0))
1289 return;
1291 arg0 = CALL_EXPR_ARG (exp, 0);
1293 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1294 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1295 locality). */
1296 nargs = call_expr_nargs (exp);
1297 if (nargs > 1)
1298 arg1 = CALL_EXPR_ARG (exp, 1);
1299 else
1300 arg1 = integer_zero_node;
1301 if (nargs > 2)
1302 arg2 = CALL_EXPR_ARG (exp, 2);
1303 else
1304 arg2 = integer_three_node;
1306 /* Argument 0 is an address. */
1307 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1309 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1310 if (TREE_CODE (arg1) != INTEGER_CST)
1312 error ("second argument to %<__builtin_prefetch%> must be a constant");
1313 arg1 = integer_zero_node;
1315 op1 = expand_normal (arg1);
1316 /* Argument 1 must be either zero or one. */
1317 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1319 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1320 " using zero");
1321 op1 = const0_rtx;
1324 /* Argument 2 (locality) must be a compile-time constant int. */
1325 if (TREE_CODE (arg2) != INTEGER_CST)
1327 error ("third argument to %<__builtin_prefetch%> must be a constant");
1328 arg2 = integer_zero_node;
1330 op2 = expand_normal (arg2);
1331 /* Argument 2 must be 0, 1, 2, or 3. */
1332 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1334 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1335 op2 = const0_rtx;
1338 if (targetm.have_prefetch ())
1340 class expand_operand ops[3];
1342 create_address_operand (&ops[0], op0);
1343 create_integer_operand (&ops[1], INTVAL (op1));
1344 create_integer_operand (&ops[2], INTVAL (op2));
1345 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1346 return;
1349 /* Don't do anything with direct references to volatile memory, but
1350 generate code to handle other side effects. */
1351 if (!MEM_P (op0) && side_effects_p (op0))
1352 emit_insn (op0);
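/* At the source level this handles calls such as

     __builtin_prefetch (p);           [read access, locality 3]
     __builtin_prefetch (p, 1, 1);     [write access, low temporal locality]

   where the second and third arguments must be integer constants; on
   targets without a prefetch pattern the address is evaluated only for
   its side effects.  */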
1355 /* Get a MEM rtx for expression EXP which is the address of an operand
1356 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1357 the maximum length of the block of memory that might be accessed or
1358 NULL if unknown. */
1360 static rtx
1361 get_memory_rtx (tree exp, tree len)
1363 tree orig_exp = exp;
1364 rtx addr, mem;
1366 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1367 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1368 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1369 exp = TREE_OPERAND (exp, 0);
1371 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1372 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1374 /* Get an expression we can use to find the attributes to assign to MEM.
1375 First remove any nops. */
1376 while (CONVERT_EXPR_P (exp)
1377 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1378 exp = TREE_OPERAND (exp, 0);
1380 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1381 (as builtin stringops may alias with anything). */
1382 exp = fold_build2 (MEM_REF,
1383 build_array_type (char_type_node,
1384 build_range_type (sizetype,
1385 size_one_node, len)),
1386 exp, build_int_cst (ptr_type_node, 0));
1388 /* If the MEM_REF has no acceptable address, try to get the base object
1389 from the original address we got, and build an all-aliasing
1390 unknown-sized access to that one. */
1391 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1392 set_mem_attributes (mem, exp, 0);
1393 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1394 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1395 0))))
1397 exp = build_fold_addr_expr (exp);
1398 exp = fold_build2 (MEM_REF,
1399 build_array_type (char_type_node,
1400 build_range_type (sizetype,
1401 size_zero_node,
1402 NULL)),
1403 exp, build_int_cst (ptr_type_node, 0));
1404 set_mem_attributes (mem, exp, 0);
1406 set_mem_alias_set (mem, 0);
1407 return mem;
1410 /* Built-in functions to perform an untyped call and return. */
1412 #define apply_args_mode \
1413 (this_target_builtins->x_apply_args_mode)
1414 #define apply_result_mode \
1415 (this_target_builtins->x_apply_result_mode)
1417 /* Return the size required for the block returned by __builtin_apply_args,
1418 and initialize apply_args_mode. */
1420 static int
1421 apply_args_size (void)
1423 static int size = -1;
1424 int align;
1425 unsigned int regno;
1427 /* The values computed by this function never change. */
1428 if (size < 0)
1430 /* The first value is the incoming arg-pointer. */
1431 size = GET_MODE_SIZE (Pmode);
1433 /* The second value is the structure value address unless this is
1434 passed as an "invisible" first argument. */
1435 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1436 size += GET_MODE_SIZE (Pmode);
1438 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1439 if (FUNCTION_ARG_REGNO_P (regno))
1441 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1443 gcc_assert (mode != VOIDmode);
1445 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1446 if (size % align != 0)
1447 size = CEIL (size, align) * align;
1448 size += GET_MODE_SIZE (mode);
1449 apply_args_mode[regno] = mode;
1451 else
1453 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1456 return size;
1459 /* Return the size required for the block returned by __builtin_apply,
1460 and initialize apply_result_mode. */
1462 static int
1463 apply_result_size (void)
1465 static int size = -1;
1466 int align, regno;
1468 /* The values computed by this function never change. */
1469 if (size < 0)
1471 size = 0;
1473 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1474 if (targetm.calls.function_value_regno_p (regno))
1476 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1478 gcc_assert (mode != VOIDmode);
1480 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1481 if (size % align != 0)
1482 size = CEIL (size, align) * align;
1483 size += GET_MODE_SIZE (mode);
1484 apply_result_mode[regno] = mode;
1486 else
1487 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1489 /* Allow targets that use untyped_call and untyped_return to override
1490 the size so that machine-specific information can be stored here. */
1491 #ifdef APPLY_RESULT_SIZE
1492 size = APPLY_RESULT_SIZE;
1493 #endif
1495 return size;
1498 /* Create a vector describing the result block RESULT. If SAVEP is true,
1499 the result block is used to save the values; otherwise it is used to
1500 restore the values. */
1502 static rtx
1503 result_vector (int savep, rtx result)
1505 int regno, size, align, nelts;
1506 fixed_size_mode mode;
1507 rtx reg, mem;
1508 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1510 size = nelts = 0;
1511 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1512 if ((mode = apply_result_mode[regno]) != VOIDmode)
1514 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1515 if (size % align != 0)
1516 size = CEIL (size, align) * align;
1517 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1518 mem = adjust_address (result, mode, size);
1519 savevec[nelts++] = (savep
1520 ? gen_rtx_SET (mem, reg)
1521 : gen_rtx_SET (reg, mem));
1522 size += GET_MODE_SIZE (mode);
1524 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1527 /* Save the state required to perform an untyped call with the same
1528 arguments as were passed to the current function. */
1530 static rtx
1531 expand_builtin_apply_args_1 (void)
1533 rtx registers, tem;
1534 int size, align, regno;
1535 fixed_size_mode mode;
1536 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1538 /* Create a block where the arg-pointer, structure value address,
1539 and argument registers can be saved. */
1540 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1542 /* Walk past the arg-pointer and structure value address. */
1543 size = GET_MODE_SIZE (Pmode);
1544 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1545 size += GET_MODE_SIZE (Pmode);
1547 /* Save each register used in calling a function to the block. */
1548 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1549 if ((mode = apply_args_mode[regno]) != VOIDmode)
1551 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1552 if (size % align != 0)
1553 size = CEIL (size, align) * align;
1555 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1557 emit_move_insn (adjust_address (registers, mode, size), tem);
1558 size += GET_MODE_SIZE (mode);
1561 /* Save the arg pointer to the block. */
1562 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1563 /* We need the pointer as the caller actually passed them to us, not
1564 as we might have pretended they were passed. Make sure it's a valid
1565 operand, as emit_move_insn isn't expected to handle a PLUS. */
1566 if (STACK_GROWS_DOWNWARD)
1567 tem
1568 = force_operand (plus_constant (Pmode, tem,
1569 crtl->args.pretend_args_size),
1570 NULL_RTX);
1571 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1573 size = GET_MODE_SIZE (Pmode);
1575 /* Save the structure value address unless this is passed as an
1576 "invisible" first argument. */
1577 if (struct_incoming_value)
1578 emit_move_insn (adjust_address (registers, Pmode, size),
1579 copy_to_reg (struct_incoming_value));
1581 /* Return the address of the block. */
1582 return copy_addr_to_reg (XEXP (registers, 0));
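/* The block built above is laid out as: the incoming arg pointer at
   offset 0, then (if it is not passed as a hidden first argument) the
   structure value address, then each register in apply_args_mode at the
   next suitably aligned offset -- the same offsets computed by
   apply_args_size and read back by expand_builtin_apply.  */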
1585 /* __builtin_apply_args returns block of memory allocated on
1586 the stack into which is stored the arg pointer, structure
1587 value address, static chain, and all the registers that might
1588 possibly be used in performing a function call. The code is
1589 moved to the start of the function so the incoming values are
1590 saved. */
1592 static rtx
1593 expand_builtin_apply_args (void)
1595 /* Don't do __builtin_apply_args more than once in a function.
1596 Save the result of the first call and reuse it. */
1597 if (apply_args_value != 0)
1598 return apply_args_value;
1600 /* When this function is called, it means that registers must be
1601 saved on entry to this function. So we migrate the
1602 call to the first insn of this function. */
1603 rtx temp;
1605 start_sequence ();
1606 temp = expand_builtin_apply_args_1 ();
1607 rtx_insn *seq = get_insns ();
1608 end_sequence ();
1610 apply_args_value = temp;
1612 /* Put the insns after the NOTE that starts the function.
1613 If this is inside a start_sequence, make the outer-level insn
1614 chain current, so the code is placed at the start of the
1615 function. If internal_arg_pointer is a non-virtual pseudo,
1616 it needs to be placed after the function that initializes
1617 that pseudo. */
1618 push_topmost_sequence ();
1619 if (REG_P (crtl->args.internal_arg_pointer)
1620 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1621 emit_insn_before (seq, parm_birth_insn);
1622 else
1623 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1624 pop_topmost_sequence ();
1625 return temp;
1629 /* Perform an untyped call and save the state required to perform an
1630 untyped return of whatever value was returned by the given function. */
1632 static rtx
1633 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1635 int size, align, regno;
1636 fixed_size_mode mode;
1637 rtx incoming_args, result, reg, dest, src;
1638 rtx_call_insn *call_insn;
1639 rtx old_stack_level = 0;
1640 rtx call_fusage = 0;
1641 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1643 arguments = convert_memory_address (Pmode, arguments);
1645 /* Create a block where the return registers can be saved. */
1646 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1648 /* Fetch the arg pointer from the ARGUMENTS block. */
1649 incoming_args = gen_reg_rtx (Pmode);
1650 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1651 if (!STACK_GROWS_DOWNWARD)
1652 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1653 incoming_args, 0, OPTAB_LIB_WIDEN);
1655 /* Push a new argument block and copy the arguments. Do not allow
1656 the (potential) memcpy call below to interfere with our stack
1657 manipulations. */
1658 do_pending_stack_adjust ();
1659 NO_DEFER_POP;
1661 /* Save the stack with nonlocal if available. */
1662 if (targetm.have_save_stack_nonlocal ())
1663 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1664 else
1665 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1667 /* Allocate a block of memory onto the stack and copy the memory
1668 arguments to the outgoing arguments address. We can pass TRUE
1669 as the 4th argument because we just saved the stack pointer
1670 and will restore it right after the call. */
1671 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1673 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1674 may have already set current_function_calls_alloca to true.
1675 current_function_calls_alloca won't be set if argsize is zero,
1676 so we have to guarantee need_drap is true here. */
1677 if (SUPPORTS_STACK_ALIGNMENT)
1678 crtl->need_drap = true;
1680 dest = virtual_outgoing_args_rtx;
1681 if (!STACK_GROWS_DOWNWARD)
1683 if (CONST_INT_P (argsize))
1684 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1685 else
1686 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1688 dest = gen_rtx_MEM (BLKmode, dest);
1689 set_mem_align (dest, PARM_BOUNDARY);
1690 src = gen_rtx_MEM (BLKmode, incoming_args);
1691 set_mem_align (src, PARM_BOUNDARY);
1692 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1694 /* Refer to the argument block. */
1695 apply_args_size ();
1696 arguments = gen_rtx_MEM (BLKmode, arguments);
1697 set_mem_align (arguments, PARM_BOUNDARY);
1699 /* Walk past the arg-pointer and structure value address. */
1700 size = GET_MODE_SIZE (Pmode);
1701 if (struct_value)
1702 size += GET_MODE_SIZE (Pmode);
1704 /* Restore each of the registers previously saved. Make USE insns
1705 for each of these registers for use in making the call. */
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_args_mode[regno]) != VOIDmode)
1709 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1710 if (size % align != 0)
1711 size = CEIL (size, align) * align;
1712 reg = gen_rtx_REG (mode, regno);
1713 emit_move_insn (reg, adjust_address (arguments, mode, size));
1714 use_reg (&call_fusage, reg);
1715 size += GET_MODE_SIZE (mode);
1718 /* Restore the structure value address unless this is passed as an
1719 "invisible" first argument. */
1720 size = GET_MODE_SIZE (Pmode);
1721 if (struct_value)
1723 rtx value = gen_reg_rtx (Pmode);
1724 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1725 emit_move_insn (struct_value, value);
1726 if (REG_P (struct_value))
1727 use_reg (&call_fusage, struct_value);
1730 /* All arguments and registers used for the call are set up by now! */
1731 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1733 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1734 and we don't want to load it into a register as an optimization,
1735 because prepare_call_address already did it if it should be done. */
1736 if (GET_CODE (function) != SYMBOL_REF)
1737 function = memory_address (FUNCTION_MODE, function);
1739 /* Generate the actual call instruction and save the return value. */
1740 if (targetm.have_untyped_call ())
1742 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1743 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1744 result_vector (1, result));
1745 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1746 if (CALL_P (insn))
1747 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1748 emit_insn (seq);
1750 else if (targetm.have_call_value ())
1752 rtx valreg = 0;
1754 /* Locate the unique return register. It is not possible to
1755 express a call that sets more than one return register using
1756 call_value; use untyped_call for that. In fact, untyped_call
1757 only needs to save the return registers in the given block. */
1758 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1759 if ((mode = apply_result_mode[regno]) != VOIDmode)
1761 gcc_assert (!valreg); /* have_untyped_call required. */
1763 valreg = gen_rtx_REG (mode, regno);
1766 emit_insn (targetm.gen_call_value (valreg,
1767 gen_rtx_MEM (FUNCTION_MODE, function),
1768 const0_rtx, NULL_RTX, const0_rtx));
1770 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1772 else
1773 gcc_unreachable ();
1775 /* Find the CALL insn we just emitted, and attach the register usage
1776 information. */
1777 call_insn = last_call_insn ();
1778 add_function_usage_to (call_insn, call_fusage);
1780 /* Restore the stack. */
1781 if (targetm.have_save_stack_nonlocal ())
1782 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1783 else
1784 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1785 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1787 OK_DEFER_POP;
1789 /* Return the address of the result block. */
1790 result = copy_addr_to_reg (XEXP (result, 0));
1791 return convert_memory_address (ptr_mode, result);
1794 /* Perform an untyped return. */
1796 static void
1797 expand_builtin_return (rtx result)
1799 int size, align, regno;
1800 fixed_size_mode mode;
1801 rtx reg;
1802 rtx_insn *call_fusage = 0;
1804 result = convert_memory_address (Pmode, result);
1806 apply_result_size ();
1807 result = gen_rtx_MEM (BLKmode, result);
1809 if (targetm.have_untyped_return ())
1811 rtx vector = result_vector (0, result);
1812 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1813 emit_barrier ();
1814 return;
1817 /* Restore the return value and note that each value is used. */
1818 size = 0;
1819 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1820 if ((mode = apply_result_mode[regno]) != VOIDmode)
1822 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1823 if (size % align != 0)
1824 size = CEIL (size, align) * align;
1825 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1826 emit_move_insn (reg, adjust_address (result, mode, size));
1828 push_to_sequence (call_fusage);
1829 emit_use (reg);
1830 call_fusage = get_insns ();
1831 end_sequence ();
1832 size += GET_MODE_SIZE (mode);
1835 /* Put the USE insns before the return. */
1836 emit_insn (call_fusage);
1838 /* Return whatever values were restored by jumping directly to the end
1839 of the function. */
1840 expand_naked_return ();
1843 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1845 static enum type_class
1846 type_to_class (tree type)
1848 switch (TREE_CODE (type))
1850 case VOID_TYPE: return void_type_class;
1851 case INTEGER_TYPE: return integer_type_class;
1852 case ENUMERAL_TYPE: return enumeral_type_class;
1853 case BOOLEAN_TYPE: return boolean_type_class;
1854 case POINTER_TYPE: return pointer_type_class;
1855 case REFERENCE_TYPE: return reference_type_class;
1856 case OFFSET_TYPE: return offset_type_class;
1857 case REAL_TYPE: return real_type_class;
1858 case COMPLEX_TYPE: return complex_type_class;
1859 case FUNCTION_TYPE: return function_type_class;
1860 case METHOD_TYPE: return method_type_class;
1861 case RECORD_TYPE: return record_type_class;
1862 case UNION_TYPE:
1863 case QUAL_UNION_TYPE: return union_type_class;
1864 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1865 ? string_type_class : array_type_class);
1866 case LANG_TYPE: return lang_type_class;
1867 case OPAQUE_TYPE: return opaque_type_class;
1868 default: return no_type_class;
1872 /* Expand a call EXP to __builtin_classify_type. */
1874 static rtx
1875 expand_builtin_classify_type (tree exp)
1877 if (call_expr_nargs (exp))
1878 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1879 return GEN_INT (no_type_class);
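/* For example, __builtin_classify_type (1.0) evaluates to real_type_class
   and __builtin_classify_type (1) to integer_type_class; see typeclass.h
   for the enumeration of type classes.  */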
1882 /* This helper macro, meant to be used in mathfn_built_in below, determines
1883 which among a set of builtin math functions is appropriate for a given type
1884 mode. The `F' (float) and `L' (long double) variants are automatically generated
1885 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1886 types, there are additional types that are considered with 'F32', 'F64',
1887 'F128', etc. suffixes. */
1888 #define CASE_MATHFN(MATHFN) \
1889 CASE_CFN_##MATHFN: \
1890 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1891 fcodel = BUILT_IN_##MATHFN##L ; break;
1892 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1893 types. */
1894 #define CASE_MATHFN_FLOATN(MATHFN) \
1895 CASE_CFN_##MATHFN: \
1896 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1897 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1898 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1899 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1900 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1901 break;
1902 /* Similar to above, but appends _R after any F/L suffix. */
1903 #define CASE_MATHFN_REENT(MATHFN) \
1904 case CFN_BUILT_IN_##MATHFN##_R: \
1905 case CFN_BUILT_IN_##MATHFN##F_R: \
1906 case CFN_BUILT_IN_##MATHFN##L_R: \
1907 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1908 fcodel = BUILT_IN_##MATHFN##L_R ; break;
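/* As a rough sketch, CASE_MATHFN (ACOS) expands to the case labels produced
   by CASE_CFN_ACOS (the internal-function and built-in forms of acos, where
   both exist), followed by:
     fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
     fcodel = BUILT_IN_ACOSL; break;  */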
1910 /* Return a function equivalent to FN but operating on floating-point
1911 values of type TYPE, or END_BUILTINS if no such function exists.
1912 This is purely an operation on function codes; it does not guarantee
1913 that the target actually has an implementation of the function. */
1915 static built_in_function
1916 mathfn_built_in_2 (tree type, combined_fn fn)
1918 tree mtype;
1919 built_in_function fcode, fcodef, fcodel;
1920 built_in_function fcodef16 = END_BUILTINS;
1921 built_in_function fcodef32 = END_BUILTINS;
1922 built_in_function fcodef64 = END_BUILTINS;
1923 built_in_function fcodef128 = END_BUILTINS;
1924 built_in_function fcodef32x = END_BUILTINS;
1925 built_in_function fcodef64x = END_BUILTINS;
1926 built_in_function fcodef128x = END_BUILTINS;
1928 switch (fn)
1930 #define SEQ_OF_CASE_MATHFN \
1931 CASE_MATHFN (ACOS) \
1932 CASE_MATHFN (ACOSH) \
1933 CASE_MATHFN (ASIN) \
1934 CASE_MATHFN (ASINH) \
1935 CASE_MATHFN (ATAN) \
1936 CASE_MATHFN (ATAN2) \
1937 CASE_MATHFN (ATANH) \
1938 CASE_MATHFN (CBRT) \
1939 CASE_MATHFN_FLOATN (CEIL) \
1940 CASE_MATHFN (CEXPI) \
1941 CASE_MATHFN_FLOATN (COPYSIGN) \
1942 CASE_MATHFN (COS) \
1943 CASE_MATHFN (COSH) \
1944 CASE_MATHFN (DREM) \
1945 CASE_MATHFN (ERF) \
1946 CASE_MATHFN (ERFC) \
1947 CASE_MATHFN (EXP) \
1948 CASE_MATHFN (EXP10) \
1949 CASE_MATHFN (EXP2) \
1950 CASE_MATHFN (EXPM1) \
1951 CASE_MATHFN (FABS) \
1952 CASE_MATHFN (FDIM) \
1953 CASE_MATHFN_FLOATN (FLOOR) \
1954 CASE_MATHFN_FLOATN (FMA) \
1955 CASE_MATHFN_FLOATN (FMAX) \
1956 CASE_MATHFN_FLOATN (FMIN) \
1957 CASE_MATHFN (FMOD) \
1958 CASE_MATHFN (FREXP) \
1959 CASE_MATHFN (GAMMA) \
1960 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1961 CASE_MATHFN (HUGE_VAL) \
1962 CASE_MATHFN (HYPOT) \
1963 CASE_MATHFN (ILOGB) \
1964 CASE_MATHFN (ICEIL) \
1965 CASE_MATHFN (IFLOOR) \
1966 CASE_MATHFN (INF) \
1967 CASE_MATHFN (IRINT) \
1968 CASE_MATHFN (IROUND) \
1969 CASE_MATHFN (ISINF) \
1970 CASE_MATHFN (J0) \
1971 CASE_MATHFN (J1) \
1972 CASE_MATHFN (JN) \
1973 CASE_MATHFN (LCEIL) \
1974 CASE_MATHFN (LDEXP) \
1975 CASE_MATHFN (LFLOOR) \
1976 CASE_MATHFN (LGAMMA) \
1977 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1978 CASE_MATHFN (LLCEIL) \
1979 CASE_MATHFN (LLFLOOR) \
1980 CASE_MATHFN (LLRINT) \
1981 CASE_MATHFN (LLROUND) \
1982 CASE_MATHFN (LOG) \
1983 CASE_MATHFN (LOG10) \
1984 CASE_MATHFN (LOG1P) \
1985 CASE_MATHFN (LOG2) \
1986 CASE_MATHFN (LOGB) \
1987 CASE_MATHFN (LRINT) \
1988 CASE_MATHFN (LROUND) \
1989 CASE_MATHFN (MODF) \
1990 CASE_MATHFN (NAN) \
1991 CASE_MATHFN (NANS) \
1992 CASE_MATHFN_FLOATN (NEARBYINT) \
1993 CASE_MATHFN (NEXTAFTER) \
1994 CASE_MATHFN (NEXTTOWARD) \
1995 CASE_MATHFN (POW) \
1996 CASE_MATHFN (POWI) \
1997 CASE_MATHFN (POW10) \
1998 CASE_MATHFN (REMAINDER) \
1999 CASE_MATHFN (REMQUO) \
2000 CASE_MATHFN_FLOATN (RINT) \
2001 CASE_MATHFN_FLOATN (ROUND) \
2002 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2003 CASE_MATHFN (SCALB) \
2004 CASE_MATHFN (SCALBLN) \
2005 CASE_MATHFN (SCALBN) \
2006 CASE_MATHFN (SIGNBIT) \
2007 CASE_MATHFN (SIGNIFICAND) \
2008 CASE_MATHFN (SIN) \
2009 CASE_MATHFN (SINCOS) \
2010 CASE_MATHFN (SINH) \
2011 CASE_MATHFN_FLOATN (SQRT) \
2012 CASE_MATHFN (TAN) \
2013 CASE_MATHFN (TANH) \
2014 CASE_MATHFN (TGAMMA) \
2015 CASE_MATHFN_FLOATN (TRUNC) \
2016 CASE_MATHFN (Y0) \
2017 CASE_MATHFN (Y1) \
2018 CASE_MATHFN (YN)
2020 SEQ_OF_CASE_MATHFN
2022 default:
2023 return END_BUILTINS;
2026 mtype = TYPE_MAIN_VARIANT (type);
2027 if (mtype == double_type_node)
2028 return fcode;
2029 else if (mtype == float_type_node)
2030 return fcodef;
2031 else if (mtype == long_double_type_node)
2032 return fcodel;
2033 else if (mtype == float16_type_node)
2034 return fcodef16;
2035 else if (mtype == float32_type_node)
2036 return fcodef32;
2037 else if (mtype == float64_type_node)
2038 return fcodef64;
2039 else if (mtype == float128_type_node)
2040 return fcodef128;
2041 else if (mtype == float32x_type_node)
2042 return fcodef32x;
2043 else if (mtype == float64x_type_node)
2044 return fcodef64x;
2045 else if (mtype == float128x_type_node)
2046 return fcodef128x;
2047 else
2048 return END_BUILTINS;
2051 #undef CASE_MATHFN
2052 #undef CASE_MATHFN_FLOATN
2053 #undef CASE_MATHFN_REENT
2055 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2056 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2057 otherwise use the explicit declaration. If we can't do the conversion,
2058 return null. */
2060 static tree
2061 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2063 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2064 if (fcode2 == END_BUILTINS)
2065 return NULL_TREE;
2067 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2068 return NULL_TREE;
2070 return builtin_decl_explicit (fcode2);
2073 /* Like mathfn_built_in_1, but always use the implicit array. */
2075 tree
2076 mathfn_built_in (tree type, combined_fn fn)
2078 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2081 /* Like mathfn_built_in_1, but take a built_in_function and
2082 always use the implicit array. */
2084 tree
2085 mathfn_built_in (tree type, enum built_in_function fn)
2087 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
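/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) is expected
   to return the decl for sqrtf, provided that builtin has been declared as
   implicitly usable.  */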
2090 /* Return the type associated with a built-in function, i.e., the one
2091 to be passed to mathfn_built_in to get the type-specific
2092 function. */
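/* For example, mathfn_built_in_type (CFN_BUILT_IN_SQRTF) returns
   float_type_node and mathfn_built_in_type (CFN_BUILT_IN_SQRTL) returns
   long_double_type_node.  */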
2094 tree
2095 mathfn_built_in_type (combined_fn fn)
2097 #define CASE_MATHFN(MATHFN) \
2098 case CFN_BUILT_IN_##MATHFN: \
2099 return double_type_node; \
2100 case CFN_BUILT_IN_##MATHFN##F: \
2101 return float_type_node; \
2102 case CFN_BUILT_IN_##MATHFN##L: \
2103 return long_double_type_node;
2105 #define CASE_MATHFN_FLOATN(MATHFN) \
2106 CASE_MATHFN(MATHFN) \
2107 case CFN_BUILT_IN_##MATHFN##F16: \
2108 return float16_type_node; \
2109 case CFN_BUILT_IN_##MATHFN##F32: \
2110 return float32_type_node; \
2111 case CFN_BUILT_IN_##MATHFN##F64: \
2112 return float64_type_node; \
2113 case CFN_BUILT_IN_##MATHFN##F128: \
2114 return float128_type_node; \
2115 case CFN_BUILT_IN_##MATHFN##F32X: \
2116 return float32x_type_node; \
2117 case CFN_BUILT_IN_##MATHFN##F64X: \
2118 return float64x_type_node; \
2119 case CFN_BUILT_IN_##MATHFN##F128X: \
2120 return float128x_type_node;
2122 /* Similar to above, but appends _R after any F/L suffix. */
2123 #define CASE_MATHFN_REENT(MATHFN) \
2124 case CFN_BUILT_IN_##MATHFN##_R: \
2125 return double_type_node; \
2126 case CFN_BUILT_IN_##MATHFN##F_R: \
2127 return float_type_node; \
2128 case CFN_BUILT_IN_##MATHFN##L_R: \
2129 return long_double_type_node;
2131 switch (fn)
2133 SEQ_OF_CASE_MATHFN
2135 default:
2136 return NULL_TREE;
2139 #undef CASE_MATHFN
2140 #undef CASE_MATHFN_FLOATN
2141 #undef CASE_MATHFN_REENT
2142 #undef SEQ_OF_CASE_MATHFN
2145 /* Check whether there is an internal function associated with function FN
2146 and return type RETURN_TYPE. Return the function if so, otherwise return
2147 IFN_LAST.
2149 Note that this function only tests whether the function is defined in
2150 internal-fn.def, not whether it is actually available on the target. */
2152 static internal_fn
2153 associated_internal_fn (built_in_function fn, tree return_type)
2155 switch (fn)
2157 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2158 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2159 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2161 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2162 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2163 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2164 #include "internal-fn.def"
2166 CASE_FLT_FN (BUILT_IN_POW10):
2167 return IFN_EXP10;
2169 CASE_FLT_FN (BUILT_IN_DREM):
2170 return IFN_REMAINDER;
2172 CASE_FLT_FN (BUILT_IN_SCALBN):
2173 CASE_FLT_FN (BUILT_IN_SCALBLN):
2174 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2175 return IFN_LDEXP;
2176 return IFN_LAST;
2178 default:
2179 return IFN_LAST;
2183 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2184 return its code, otherwise return IFN_LAST. Note that this function
2185 only tests whether the function is defined in internal-fn.def, not whether
2186 it is actually available on the target. */
2188 internal_fn
2189 associated_internal_fn (tree fndecl)
2191 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2192 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2193 TREE_TYPE (TREE_TYPE (fndecl)));
2196 /* Check whether there is an internal function associated with function CFN
2197 and return type RETURN_TYPE. Return the function if so, otherwise return
2198 IFN_LAST.
2200 Note that this function only tests whether the function is defined in
2201 internal-fn.def, not whether it is actually available on the target. */
2203 internal_fn
2204 associated_internal_fn (combined_fn cfn, tree return_type)
2206 if (internal_fn_p (cfn))
2207 return as_internal_fn (cfn);
2208 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2211 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2212 on the current target by a call to an internal function, return the
2213 code of that internal function, otherwise return IFN_LAST. The caller
2214 is responsible for ensuring that any side-effects of the built-in
2215 call are dealt with correctly. E.g. if CALL sets errno, the caller
2216 must decide that the errno result isn't needed or make it available
2217 in some other way. */
2219 internal_fn
2220 replacement_internal_fn (gcall *call)
2222 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2224 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2225 if (ifn != IFN_LAST)
2227 tree_pair types = direct_internal_fn_types (ifn, call);
2228 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2229 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2230 return ifn;
2233 return IFN_LAST;
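/* For example, a GIMPLE call to __builtin_sqrtf (x) can be reported as
   replaceable by IFN_SQRT when the target has a handler for the sqrt optab
   in SFmode; the caller is then responsible for any errno semantics, as
   noted above.  */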
2236 /* Expand a call to the builtin ternary math functions (fma).
2237 Return NULL_RTX if a normal call should be emitted rather than expanding the
2238 function in-line. EXP is the expression that is a call to the builtin
2239 function; if convenient, the result should be placed in TARGET.
2240 SUBTARGET may be used as the target for computing one of EXP's
2241 operands. */
2243 static rtx
2244 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2246 optab builtin_optab;
2247 rtx op0, op1, op2, result;
2248 rtx_insn *insns;
2249 tree fndecl = get_callee_fndecl (exp);
2250 tree arg0, arg1, arg2;
2251 machine_mode mode;
2253 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2254 return NULL_RTX;
2256 arg0 = CALL_EXPR_ARG (exp, 0);
2257 arg1 = CALL_EXPR_ARG (exp, 1);
2258 arg2 = CALL_EXPR_ARG (exp, 2);
2260 switch (DECL_FUNCTION_CODE (fndecl))
2262 CASE_FLT_FN (BUILT_IN_FMA):
2263 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2264 builtin_optab = fma_optab; break;
2265 default:
2266 gcc_unreachable ();
2269 /* Make a suitable register to place result in. */
2270 mode = TYPE_MODE (TREE_TYPE (exp));
2272 /* Before working hard, check whether the instruction is available. */
2273 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2274 return NULL_RTX;
2276 result = gen_reg_rtx (mode);
2278 /* Always stabilize the argument list. */
2279 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2280 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2281 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2283 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2284 op1 = expand_normal (arg1);
2285 op2 = expand_normal (arg2);
2287 start_sequence ();
2289 /* Compute into RESULT.
2290 Set RESULT to wherever the result comes back. */
2291 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2292 result, 0);
2294 /* If we were unable to expand via the builtin, stop the sequence
2295 (without outputting the insns) and call to the library function
2296 with the stabilized argument list. */
2297 if (result == 0)
2299 end_sequence ();
2300 return expand_call (exp, target, target == const0_rtx);
2303 /* Output the entire sequence. */
2304 insns = get_insns ();
2305 end_sequence ();
2306 emit_insn (insns);
2308 return result;
2311 /* Expand a call to the builtin sin and cos math functions.
2312 Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function; if convenient, the result should be placed in TARGET.
2315 SUBTARGET may be used as the target for computing one of EXP's
2316 operands. */
2318 static rtx
2319 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2321 optab builtin_optab;
2322 rtx op0;
2323 rtx_insn *insns;
2324 tree fndecl = get_callee_fndecl (exp);
2325 machine_mode mode;
2326 tree arg;
2328 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2329 return NULL_RTX;
2331 arg = CALL_EXPR_ARG (exp, 0);
2333 switch (DECL_FUNCTION_CODE (fndecl))
2335 CASE_FLT_FN (BUILT_IN_SIN):
2336 CASE_FLT_FN (BUILT_IN_COS):
2337 builtin_optab = sincos_optab; break;
2338 default:
2339 gcc_unreachable ();
2342 /* Make a suitable register to place result in. */
2343 mode = TYPE_MODE (TREE_TYPE (exp));
2345 /* Check if the sincos insn is available; otherwise fall back
2346 to the sin or cos insn. */
2347 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2348 switch (DECL_FUNCTION_CODE (fndecl))
2350 CASE_FLT_FN (BUILT_IN_SIN):
2351 builtin_optab = sin_optab; break;
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = cos_optab; break;
2354 default:
2355 gcc_unreachable ();
2358 /* Before working hard, check whether the instruction is available. */
2359 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2361 rtx result = gen_reg_rtx (mode);
2363 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2364 need to expand the argument again. This way, we will not perform
2365 side-effects more than once. */
2366 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2368 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2370 start_sequence ();
2372 /* Compute into RESULT.
2373 Set RESULT to wherever the result comes back. */
2374 if (builtin_optab == sincos_optab)
2376 int ok;
2378 switch (DECL_FUNCTION_CODE (fndecl))
2380 CASE_FLT_FN (BUILT_IN_SIN):
2381 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2382 break;
2383 CASE_FLT_FN (BUILT_IN_COS):
2384 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2385 break;
2386 default:
2387 gcc_unreachable ();
2389 gcc_assert (ok);
2391 else
2392 result = expand_unop (mode, builtin_optab, op0, result, 0);
2394 if (result != 0)
2396 /* Output the entire sequence. */
2397 insns = get_insns ();
2398 end_sequence ();
2399 emit_insn (insns);
2400 return result;
2403 /* If we were unable to expand via the builtin, stop the sequence
2404 (without outputting the insns) and call to the library function
2405 with the stabilized argument list. */
2406 end_sequence ();
2409 return expand_call (exp, target, target == const0_rtx);
2412 /* Given an interclass math builtin decl FNDECL and its argument ARG
2413 return an RTL instruction code that implements the functionality.
2414 If that isn't possible or available return CODE_FOR_nothing. */
2416 static enum insn_code
2417 interclass_mathfn_icode (tree arg, tree fndecl)
2419 bool errno_set = false;
2420 optab builtin_optab = unknown_optab;
2421 machine_mode mode;
2423 switch (DECL_FUNCTION_CODE (fndecl))
2425 CASE_FLT_FN (BUILT_IN_ILOGB):
2426 errno_set = true; builtin_optab = ilogb_optab; break;
2427 CASE_FLT_FN (BUILT_IN_ISINF):
2428 builtin_optab = isinf_optab; break;
2429 case BUILT_IN_ISNORMAL:
2430 case BUILT_IN_ISFINITE:
2431 CASE_FLT_FN (BUILT_IN_FINITE):
2432 case BUILT_IN_FINITED32:
2433 case BUILT_IN_FINITED64:
2434 case BUILT_IN_FINITED128:
2435 case BUILT_IN_ISINFD32:
2436 case BUILT_IN_ISINFD64:
2437 case BUILT_IN_ISINFD128:
2438 /* These builtins have no optabs (yet). */
2439 break;
2440 default:
2441 gcc_unreachable ();
2444 /* There's no easy way to detect the case we need to set EDOM. */
2445 if (flag_errno_math && errno_set)
2446 return CODE_FOR_nothing;
2448 /* Optab mode depends on the mode of the input argument. */
2449 mode = TYPE_MODE (TREE_TYPE (arg));
2451 if (builtin_optab)
2452 return optab_handler (builtin_optab, mode);
2453 return CODE_FOR_nothing;
2456 /* Expand a call to one of the builtin math functions that operate on
2457 a floating-point argument and output an integer result (ilogb, isinf,
2458 isnan, etc).
2459 Return 0 if a normal call should be emitted rather than expanding the
2460 function in-line. EXP is the expression that is a call to the builtin
2461 function; if convenient, the result should be placed in TARGET. */
2463 static rtx
2464 expand_builtin_interclass_mathfn (tree exp, rtx target)
2466 enum insn_code icode = CODE_FOR_nothing;
2467 rtx op0;
2468 tree fndecl = get_callee_fndecl (exp);
2469 machine_mode mode;
2470 tree arg;
2472 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2473 return NULL_RTX;
2475 arg = CALL_EXPR_ARG (exp, 0);
2476 icode = interclass_mathfn_icode (arg, fndecl);
2477 mode = TYPE_MODE (TREE_TYPE (arg));
2479 if (icode != CODE_FOR_nothing)
2481 class expand_operand ops[1];
2482 rtx_insn *last = get_last_insn ();
2483 tree orig_arg = arg;
2485 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2486 need to expand the argument again. This way, we will not perform
2487 side-effects more than once. */
2488 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2490 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2492 if (mode != GET_MODE (op0))
2493 op0 = convert_to_mode (mode, op0, 0);
2495 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2496 if (maybe_legitimize_operands (icode, 0, 1, ops)
2497 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2498 return ops[0].value;
2500 delete_insns_since (last);
2501 CALL_EXPR_ARG (exp, 0) = orig_arg;
2504 return NULL_RTX;
2507 /* Expand a call to the builtin sincos math function.
2508 Return NULL_RTX if a normal call should be emitted rather than expanding the
2509 function in-line. EXP is the expression that is a call to the builtin
2510 function. */
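/* The builtin mirrors the GNU prototype
     void sincos (double x, double *sinp, double *cosp);
   which corresponds to the REAL_TYPE, POINTER_TYPE, POINTER_TYPE argument
   list validated below.  */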
2512 static rtx
2513 expand_builtin_sincos (tree exp)
2515 rtx op0, op1, op2, target1, target2;
2516 machine_mode mode;
2517 tree arg, sinp, cosp;
2518 int result;
2519 location_t loc = EXPR_LOCATION (exp);
2520 tree alias_type, alias_off;
2522 if (!validate_arglist (exp, REAL_TYPE,
2523 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2524 return NULL_RTX;
2526 arg = CALL_EXPR_ARG (exp, 0);
2527 sinp = CALL_EXPR_ARG (exp, 1);
2528 cosp = CALL_EXPR_ARG (exp, 2);
2530 /* Make a suitable register to place result in. */
2531 mode = TYPE_MODE (TREE_TYPE (arg));
2533 /* Check if sincos insn is available, otherwise emit the call. */
2534 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2535 return NULL_RTX;
2537 target1 = gen_reg_rtx (mode);
2538 target2 = gen_reg_rtx (mode);
2540 op0 = expand_normal (arg);
2541 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2542 alias_off = build_int_cst (alias_type, 0);
2543 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2544 sinp, alias_off));
2545 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2546 cosp, alias_off));
2548 /* Compute into target1 and target2.
2549 Set TARGET to wherever the result comes back. */
2550 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2551 gcc_assert (result);
2553 /* Move target1 and target2 to the memory locations indicated
2554 by op1 and op2. */
2555 emit_move_insn (op1, target1);
2556 emit_move_insn (op2, target2);
2558 return const0_rtx;
2561 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2562 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2563 static rtx
2564 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2566 if (!validate_arglist (exp, VOID_TYPE))
2567 return NULL_RTX;
2569 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2570 if (icode == CODE_FOR_nothing)
2571 return NULL_RTX;
2573 if (target == 0
2574 || GET_MODE (target) != target_mode
2575 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2576 target = gen_reg_rtx (target_mode);
2578 rtx pat = GEN_FCN (icode) (target);
2579 if (!pat)
2580 return NULL_RTX;
2581 emit_insn (pat);
2583 return target;
2586 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2587 fenv.h), returning the result and setting it in TARGET. Otherwise return
2588 NULL_RTX on failure. */
2589 static rtx
2590 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2591 machine_mode target_mode, optab op_optab)
2593 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2594 return NULL_RTX;
2595 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2597 insn_code icode = direct_optab_handler (op_optab, SImode);
2598 if (icode == CODE_FOR_nothing)
2599 return NULL_RTX;
2601 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2602 return NULL_RTX;
2604 if (target == 0
2605 || GET_MODE (target) != target_mode
2606 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2607 target = gen_reg_rtx (target_mode);
2609 rtx pat = GEN_FCN (icode) (target, op0);
2610 if (!pat)
2611 return NULL_RTX;
2612 emit_insn (pat);
2614 return target;
2617 /* Expand a call to the internal cexpi builtin to the sincos math function.
2618 EXP is the expression that is a call to the builtin function; if convenient,
2619 the result should be placed in TARGET. */
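/* __builtin_cexpi (x) computes exp (i*x), i.e. cos (x) + i*sin (x); it is
   generated internally (for instance when sincos or cexp calls are folded)
   rather than written directly by users.  */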
2621 static rtx
2622 expand_builtin_cexpi (tree exp, rtx target)
2624 tree fndecl = get_callee_fndecl (exp);
2625 tree arg, type;
2626 machine_mode mode;
2627 rtx op0, op1, op2;
2628 location_t loc = EXPR_LOCATION (exp);
2630 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2631 return NULL_RTX;
2633 arg = CALL_EXPR_ARG (exp, 0);
2634 type = TREE_TYPE (arg);
2635 mode = TYPE_MODE (TREE_TYPE (arg));
2637 /* Try expanding via a sincos optab, fall back to emitting a libcall
2638 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2639 is only generated from sincos, cexp or if we have either of them. */
2640 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2642 op1 = gen_reg_rtx (mode);
2643 op2 = gen_reg_rtx (mode);
2645 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2647 /* Compute into op1 and op2. */
2648 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2650 else if (targetm.libc_has_function (function_sincos, type))
2652 tree call, fn = NULL_TREE;
2653 tree top1, top2;
2654 rtx op1a, op2a;
2656 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2657 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2659 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2661 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2662 else
2663 gcc_unreachable ();
2665 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2666 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2667 op1a = copy_addr_to_reg (XEXP (op1, 0));
2668 op2a = copy_addr_to_reg (XEXP (op2, 0));
2669 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2670 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2672 /* Make sure not to fold the sincos call again. */
2673 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2674 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2675 call, 3, arg, top1, top2));
2677 else
2679 tree call, fn = NULL_TREE, narg;
2680 tree ctype = build_complex_type (type);
2682 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2683 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2684 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2685 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2686 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2687 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2688 else
2689 gcc_unreachable ();
2691 /* If we don't have a decl for cexp create one. This is the
2692 friendliest fallback if the user calls __builtin_cexpi
2693 without full C99 function support on the target. */
2694 if (fn == NULL_TREE)
2696 tree fntype;
2697 const char *name = NULL;
2699 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2700 name = "cexpf";
2701 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2702 name = "cexp";
2703 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2704 name = "cexpl";
2706 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2707 fn = build_fn_decl (name, fntype);
2710 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2711 build_real (type, dconst0), arg);
2713 /* Make sure not to fold the cexp call again. */
2714 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2715 return expand_expr (build_call_nary (ctype, call, 1, narg),
2716 target, VOIDmode, EXPAND_NORMAL);
2719 /* Now build the proper return type. */
2720 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2721 make_tree (TREE_TYPE (arg), op2),
2722 make_tree (TREE_TYPE (arg), op1)),
2723 target, VOIDmode, EXPAND_NORMAL);
2726 /* Conveniently construct a function call expression. FNDECL names the
2727 function to be called, N is the number of arguments, and the "..."
2728 parameters are the argument expressions. Unlike build_call_expr,
2729 this doesn't fold the call, hence it will always return a CALL_EXPR. */
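/* For example, later expansion code uses it as
     build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
   to emit a plain, unfolded call to a fallback library function.  */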
2731 static tree
2732 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2734 va_list ap;
2735 tree fntype = TREE_TYPE (fndecl);
2736 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2738 va_start (ap, n);
2739 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2740 va_end (ap);
2741 SET_EXPR_LOCATION (fn, loc);
2742 return fn;
2745 /* Expand a call to one of the builtin rounding functions gcc defines
2746 as an extension (lfloor and lceil). As these are gcc extensions we
2747 do not need to worry about setting errno to EDOM.
2748 If expanding via optab fails, lower expression to (int)(floor(x)).
2749 EXP is the expression that is a call to the builtin function;
2750 if convenient, the result should be placed in TARGET. */
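/* For example, when no lceil optab is available, __builtin_lceil (x) is
   expanded here roughly as (long) ceil (x), using the fallback decl chosen
   below.  */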
2752 static rtx
2753 expand_builtin_int_roundingfn (tree exp, rtx target)
2755 convert_optab builtin_optab;
2756 rtx op0, tmp;
2757 rtx_insn *insns;
2758 tree fndecl = get_callee_fndecl (exp);
2759 enum built_in_function fallback_fn;
2760 tree fallback_fndecl;
2761 machine_mode mode;
2762 tree arg;
2764 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2765 return NULL_RTX;
2767 arg = CALL_EXPR_ARG (exp, 0);
2769 switch (DECL_FUNCTION_CODE (fndecl))
2771 CASE_FLT_FN (BUILT_IN_ICEIL):
2772 CASE_FLT_FN (BUILT_IN_LCEIL):
2773 CASE_FLT_FN (BUILT_IN_LLCEIL):
2774 builtin_optab = lceil_optab;
2775 fallback_fn = BUILT_IN_CEIL;
2776 break;
2778 CASE_FLT_FN (BUILT_IN_IFLOOR):
2779 CASE_FLT_FN (BUILT_IN_LFLOOR):
2780 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2781 builtin_optab = lfloor_optab;
2782 fallback_fn = BUILT_IN_FLOOR;
2783 break;
2785 default:
2786 gcc_unreachable ();
2789 /* Make a suitable register to place result in. */
2790 mode = TYPE_MODE (TREE_TYPE (exp));
2792 target = gen_reg_rtx (mode);
2794 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2795 need to expand the argument again. This way, we will not perform
2796 side-effects more than once. */
2797 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2799 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2801 start_sequence ();
2803 /* Compute into TARGET. */
2804 if (expand_sfix_optab (target, op0, builtin_optab))
2806 /* Output the entire sequence. */
2807 insns = get_insns ();
2808 end_sequence ();
2809 emit_insn (insns);
2810 return target;
2813 /* If we were unable to expand via the builtin, stop the sequence
2814 (without outputting the insns). */
2815 end_sequence ();
2817 /* Fall back to floating point rounding optab. */
2818 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2820 /* For non-C99 targets we may end up without a fallback fndecl here
2821 if the user called __builtin_lfloor directly. In this case emit
2822 a call to the floor/ceil variants nevertheless. This should give
2823 the best user experience on targets without full C99 support. */
2824 if (fallback_fndecl == NULL_TREE)
2826 tree fntype;
2827 const char *name = NULL;
2829 switch (DECL_FUNCTION_CODE (fndecl))
2831 case BUILT_IN_ICEIL:
2832 case BUILT_IN_LCEIL:
2833 case BUILT_IN_LLCEIL:
2834 name = "ceil";
2835 break;
2836 case BUILT_IN_ICEILF:
2837 case BUILT_IN_LCEILF:
2838 case BUILT_IN_LLCEILF:
2839 name = "ceilf";
2840 break;
2841 case BUILT_IN_ICEILL:
2842 case BUILT_IN_LCEILL:
2843 case BUILT_IN_LLCEILL:
2844 name = "ceill";
2845 break;
2846 case BUILT_IN_IFLOOR:
2847 case BUILT_IN_LFLOOR:
2848 case BUILT_IN_LLFLOOR:
2849 name = "floor";
2850 break;
2851 case BUILT_IN_IFLOORF:
2852 case BUILT_IN_LFLOORF:
2853 case BUILT_IN_LLFLOORF:
2854 name = "floorf";
2855 break;
2856 case BUILT_IN_IFLOORL:
2857 case BUILT_IN_LFLOORL:
2858 case BUILT_IN_LLFLOORL:
2859 name = "floorl";
2860 break;
2861 default:
2862 gcc_unreachable ();
2865 fntype = build_function_type_list (TREE_TYPE (arg),
2866 TREE_TYPE (arg), NULL_TREE);
2867 fallback_fndecl = build_fn_decl (name, fntype);
2870 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2872 tmp = expand_normal (exp);
2873 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2875 /* Truncate the result of the floating-point optab to an integer
2876 via expand_fix (). */
2877 target = gen_reg_rtx (mode);
2878 expand_fix (target, tmp, 0);
2880 return target;
2883 /* Expand a call to one of the builtin math functions doing integer
2884 conversion (lrint).
2885 Return 0 if a normal call should be emitted rather than expanding the
2886 function in-line. EXP is the expression that is a call to the builtin
2887 function; if convenient, the result should be placed in TARGET. */
2889 static rtx
2890 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2892 convert_optab builtin_optab;
2893 rtx op0;
2894 rtx_insn *insns;
2895 tree fndecl = get_callee_fndecl (exp);
2896 tree arg;
2897 machine_mode mode;
2898 enum built_in_function fallback_fn = BUILT_IN_NONE;
2900 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2901 return NULL_RTX;
2903 arg = CALL_EXPR_ARG (exp, 0);
2905 switch (DECL_FUNCTION_CODE (fndecl))
2907 CASE_FLT_FN (BUILT_IN_IRINT):
2908 fallback_fn = BUILT_IN_LRINT;
2909 gcc_fallthrough ();
2910 CASE_FLT_FN (BUILT_IN_LRINT):
2911 CASE_FLT_FN (BUILT_IN_LLRINT):
2912 builtin_optab = lrint_optab;
2913 break;
2915 CASE_FLT_FN (BUILT_IN_IROUND):
2916 fallback_fn = BUILT_IN_LROUND;
2917 gcc_fallthrough ();
2918 CASE_FLT_FN (BUILT_IN_LROUND):
2919 CASE_FLT_FN (BUILT_IN_LLROUND):
2920 builtin_optab = lround_optab;
2921 break;
2923 default:
2924 gcc_unreachable ();
2927 /* There's no easy way to detect the case we need to set EDOM. */
2928 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2929 return NULL_RTX;
2931 /* Make a suitable register to place result in. */
2932 mode = TYPE_MODE (TREE_TYPE (exp));
2934 /* There's no easy way to detect the case we need to set EDOM. */
2935 if (!flag_errno_math)
2937 rtx result = gen_reg_rtx (mode);
2939 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2940 need to expand the argument again. This way, we will not perform
2941 side-effects more than once. */
2942 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2944 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2946 start_sequence ();
2948 if (expand_sfix_optab (result, op0, builtin_optab))
2950 /* Output the entire sequence. */
2951 insns = get_insns ();
2952 end_sequence ();
2953 emit_insn (insns);
2954 return result;
2957 /* If we were unable to expand via the builtin, stop the sequence
2958 (without outputting the insns) and call to the library function
2959 with the stabilized argument list. */
2960 end_sequence ();
2963 if (fallback_fn != BUILT_IN_NONE)
2965 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2966 targets, (int) round (x) should never be transformed into
2967 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2968 a call to lround in the hope that the target provides at least some
2969 C99 functions. This should give the best user experience on
2970 targets without full C99 support.
2971 As scalar float conversions with same mode are useless in GIMPLE,
2972 we can end up e.g. with _Float32 argument passed to float builtin,
2973 try to get the type from the builtin prototype first. */
2974 tree fallback_fndecl = NULL_TREE;
2975 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
2976 fallback_fndecl
2977 = mathfn_built_in_1 (TREE_VALUE (argtypes),
2978 as_combined_fn (fallback_fn), 0);
2979 if (fallback_fndecl == NULL_TREE)
2980 fallback_fndecl
2981 = mathfn_built_in_1 (TREE_TYPE (arg),
2982 as_combined_fn (fallback_fn), 0);
2983 if (fallback_fndecl)
2985 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2986 fallback_fndecl, 1, arg);
2988 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2989 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2990 return convert_to_mode (mode, target, 0);
2994 return expand_call (exp, target, target == const0_rtx);
2997 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2998 a normal call should be emitted rather than expanding the function
2999 in-line. EXP is the expression that is a call to the builtin
3000 function; if convenient, the result should be placed in TARGET. */
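/* __builtin_powi (x, n) takes an integer exponent and has no libm
   counterpart, so the expansion below always emits a library call through
   the powi_optab libfunc (a libgcc helper such as __powidf2 for double;
   the exact name depends on the mode and the libgcc configuration).  */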
3002 static rtx
3003 expand_builtin_powi (tree exp, rtx target)
3005 tree arg0, arg1;
3006 rtx op0, op1;
3007 machine_mode mode;
3008 machine_mode mode2;
3010 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3011 return NULL_RTX;
3013 arg0 = CALL_EXPR_ARG (exp, 0);
3014 arg1 = CALL_EXPR_ARG (exp, 1);
3015 mode = TYPE_MODE (TREE_TYPE (exp));
3017 /* Emit a libcall to libgcc. */
3019 /* Mode of the 2nd argument must match that of an int. */
3020 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3022 if (target == NULL_RTX)
3023 target = gen_reg_rtx (mode);
3025 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3026 if (GET_MODE (op0) != mode)
3027 op0 = convert_to_mode (mode, op0, 0);
3028 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3029 if (GET_MODE (op1) != mode2)
3030 op1 = convert_to_mode (mode2, op1, 0);
3032 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3033 target, LCT_CONST, mode,
3034 op0, mode, op1, mode2);
3036 return target;
3039 /* Expand expression EXP which is a call to the strlen builtin. Return
3040 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3041 try to get the result in TARGET, if convenient. */
3043 static rtx
3044 expand_builtin_strlen (tree exp, rtx target,
3045 machine_mode target_mode)
3047 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3048 return NULL_RTX;
3050 tree src = CALL_EXPR_ARG (exp, 0);
3052 /* If the length can be computed at compile-time, return it. */
3053 if (tree len = c_strlen (src, 0))
3054 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3056 /* If the length can be computed at compile-time and is a constant
3057 integer, but there are side-effects in src, evaluate
3058 src for side-effects, then return len.
3059 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3060 can be optimized into: i++; x = 3; */
3061 tree len = c_strlen (src, 1);
3062 if (len && TREE_CODE (len) == INTEGER_CST)
3064 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3065 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3068 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3070 /* If SRC is not a pointer type, don't do this operation inline. */
3071 if (align == 0)
3072 return NULL_RTX;
3074 /* Bail out if we can't compute strlen in the right mode. */
3075 machine_mode insn_mode;
3076 enum insn_code icode = CODE_FOR_nothing;
3077 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3079 icode = optab_handler (strlen_optab, insn_mode);
3080 if (icode != CODE_FOR_nothing)
3081 break;
3083 if (insn_mode == VOIDmode)
3084 return NULL_RTX;
3086 /* Make a place to hold the source address. We will not expand
3087 the actual source until we are sure that the expansion will
3088 not fail -- there are trees that cannot be expanded twice. */
3089 rtx src_reg = gen_reg_rtx (Pmode);
3091 /* Mark the beginning of the strlen sequence so we can emit the
3092 source operand later. */
3093 rtx_insn *before_strlen = get_last_insn ();
3095 class expand_operand ops[4];
3096 create_output_operand (&ops[0], target, insn_mode);
3097 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3098 create_integer_operand (&ops[2], 0);
3099 create_integer_operand (&ops[3], align);
3100 if (!maybe_expand_insn (icode, 4, ops))
3101 return NULL_RTX;
3103 /* Check to see if the argument was declared attribute nonstring
3104 and if so, issue a warning since at this point it's not known
3105 to be nul-terminated. */
3106 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3108 /* Now that we are assured of success, expand the source. */
3109 start_sequence ();
3110 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3111 if (pat != src_reg)
3113 #ifdef POINTERS_EXTEND_UNSIGNED
3114 if (GET_MODE (pat) != Pmode)
3115 pat = convert_to_mode (Pmode, pat,
3116 POINTERS_EXTEND_UNSIGNED);
3117 #endif
3118 emit_move_insn (src_reg, pat);
3120 pat = get_insns ();
3121 end_sequence ();
3123 if (before_strlen)
3124 emit_insn_after (pat, before_strlen);
3125 else
3126 emit_insn_before (pat, get_insns ());
3128 /* Return the value in the proper mode for this function. */
3129 if (GET_MODE (ops[0].value) == target_mode)
3130 target = ops[0].value;
3131 else if (target != 0)
3132 convert_move (target, ops[0].value, 0);
3133 else
3134 target = convert_to_mode (target_mode, ops[0].value, 0);
3136 return target;
3139 /* Expand call EXP to the strnlen built-in, returning the result
3140 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3142 static rtx
3143 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3145 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3146 return NULL_RTX;
3148 tree src = CALL_EXPR_ARG (exp, 0);
3149 tree bound = CALL_EXPR_ARG (exp, 1);
3151 if (!bound)
3152 return NULL_RTX;
3154 location_t loc = UNKNOWN_LOCATION;
3155 if (EXPR_HAS_LOCATION (exp))
3156 loc = EXPR_LOCATION (exp);
3158 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3159 so these conversions aren't necessary. */
3160 c_strlen_data lendata = { };
3161 tree len = c_strlen (src, 0, &lendata, 1);
3162 if (len)
3163 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3165 if (TREE_CODE (bound) == INTEGER_CST)
3167 if (!len)
3168 return NULL_RTX;
3170 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3171 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3174 if (TREE_CODE (bound) != SSA_NAME)
3175 return NULL_RTX;
3177 wide_int min, max;
3178 value_range r;
3179 get_global_range_query ()->range_of_expr (r, bound);
3180 if (r.kind () != VR_RANGE)
3181 return NULL_RTX;
3182 min = r.lower_bound ();
3183 max = r.upper_bound ();
3185 if (!len || TREE_CODE (len) != INTEGER_CST)
3187 bool exact;
3188 lendata.decl = unterminated_array (src, &len, &exact);
3189 if (!lendata.decl)
3190 return NULL_RTX;
3193 if (lendata.decl)
3194 return NULL_RTX;
3196 if (wi::gtu_p (min, wi::to_wide (len)))
3197 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3199 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3200 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3203 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3204 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3205 a target constant. */
3207 static rtx
3208 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3209 fixed_size_mode mode)
3211 /* The REPresentation pointed to by DATA need not be a nul-terminated
3212 string but the caller guarantees it's large enough for MODE. */
3213 const char *rep = (const char *) data;
3215 /* The by-pieces infrastructure does not try to pick a vector mode
3216 for memcpy expansion. */
3217 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3218 /*nul_terminated=*/false);
3221 /* LEN specifies the length of the block for the memcpy/memset operation.
3222 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3223 In some cases we can make a very likely guess at the maximum size and
3224 then store that in PROBABLE_MAX_SIZE. */
3226 static void
3227 determine_block_size (tree len, rtx len_rtx,
3228 unsigned HOST_WIDE_INT *min_size,
3229 unsigned HOST_WIDE_INT *max_size,
3230 unsigned HOST_WIDE_INT *probable_max_size)
3232 if (CONST_INT_P (len_rtx))
3234 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3235 return;
3237 else
3239 wide_int min, max;
3240 enum value_range_kind range_type = VR_UNDEFINED;
3242 /* Determine bounds from the type. */
3243 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3244 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3245 else
3246 *min_size = 0;
3247 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3248 *probable_max_size = *max_size
3249 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3250 else
3251 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3253 if (TREE_CODE (len) == SSA_NAME)
3255 value_range r;
3256 get_global_range_query ()->range_of_expr (r, len);
3257 range_type = r.kind ();
3258 if (range_type != VR_UNDEFINED)
3260 min = wi::to_wide (r.min ());
3261 max = wi::to_wide (r.max ());
3264 if (range_type == VR_RANGE)
3266 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3267 *min_size = min.to_uhwi ();
3268 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3269 *probable_max_size = *max_size = max.to_uhwi ();
3271 else if (range_type == VR_ANTI_RANGE)
3273 /* Code like
3275 int n;
3276 if (n < 100)
3277 memcpy (a, b, n)
3279 produces an anti-range allowing negative values of N. We can still
3280 use the information and guess that N is not negative.
3281 */
3282 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3283 *probable_max_size = min.to_uhwi () - 1;
3286 gcc_checking_assert (*max_size <=
3287 (unsigned HOST_WIDE_INT)
3288 GET_MODE_MASK (GET_MODE (len_rtx)));
3291 /* Expand a call EXP to the memcpy builtin.
3292 Return NULL_RTX if we failed; the caller should emit a normal call,
3293 otherwise try to get the result in TARGET, if convenient (and in
3294 mode MODE if that's convenient). */
3296 static rtx
3297 expand_builtin_memcpy (tree exp, rtx target)
3299 if (!validate_arglist (exp,
3300 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3301 return NULL_RTX;
3303 tree dest = CALL_EXPR_ARG (exp, 0);
3304 tree src = CALL_EXPR_ARG (exp, 1);
3305 tree len = CALL_EXPR_ARG (exp, 2);
3307 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3308 /*retmode=*/ RETURN_BEGIN, false);
3311 /* Check a call EXP to the memmove built-in for validity.
3312 Return NULL_RTX on both success and failure. */
3314 static rtx
3315 expand_builtin_memmove (tree exp, rtx target)
3317 if (!validate_arglist (exp,
3318 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3321 tree dest = CALL_EXPR_ARG (exp, 0);
3322 tree src = CALL_EXPR_ARG (exp, 1);
3323 tree len = CALL_EXPR_ARG (exp, 2);
3325 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3326 /*retmode=*/ RETURN_BEGIN, true);
3329 /* Expand a call EXP to the mempcpy builtin.
3330 Return NULL_RTX if we failed; the caller should emit a normal call,
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). */
3334 static rtx
3335 expand_builtin_mempcpy (tree exp, rtx target)
3337 if (!validate_arglist (exp,
3338 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3339 return NULL_RTX;
3341 tree dest = CALL_EXPR_ARG (exp, 0);
3342 tree src = CALL_EXPR_ARG (exp, 1);
3343 tree len = CALL_EXPR_ARG (exp, 2);
3345 /* Policy does not generally allow using compute_objsize (which
3346 is used internally by check_memop_size) to change code generation
3347 or drive optimization decisions.
3349 In this instance it is safe because the code we generate has
3350 the same semantics regardless of the return value of
3351 check_memop_sizes. Exactly the same amount of data is copied
3352 and the return value is exactly the same in both cases.
3354 Furthermore, check_memop_size always uses mode 0 for the call to
3355 compute_objsize, so the imprecise nature of compute_objsize is
3356 avoided. */
3358 /* Avoid expanding mempcpy into memcpy when the call is determined
3359 to overflow the buffer. This also prevents the same overflow
3360 from being diagnosed again when expanding memcpy. */
3362 return expand_builtin_mempcpy_args (dest, src, len,
3363 target, exp, /*retmode=*/ RETURN_END);
3366 /* Helper function to do the actual work for expand of memory copy family
3367 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3368 of memory from SRC to DEST and assign to TARGET if convenient. Return
3369 value is based on RETMODE argument. */
3371 static rtx
3372 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3373 rtx target, tree exp, memop_ret retmode,
3374 bool might_overlap)
3376 unsigned int src_align = get_pointer_alignment (src);
3377 unsigned int dest_align = get_pointer_alignment (dest);
3378 rtx dest_mem, src_mem, dest_addr, len_rtx;
3379 HOST_WIDE_INT expected_size = -1;
3380 unsigned int expected_align = 0;
3381 unsigned HOST_WIDE_INT min_size;
3382 unsigned HOST_WIDE_INT max_size;
3383 unsigned HOST_WIDE_INT probable_max_size;
3385 bool is_move_done;
3387 /* If DEST is not a pointer type, call the normal function. */
3388 if (dest_align == 0)
3389 return NULL_RTX;
3391 /* If SRC is not a pointer type, don't do this
3392 operation in-line. */
3393 if (src_align == 0)
3394 return NULL_RTX;
3396 if (currently_expanding_gimple_stmt)
3397 stringop_block_profile (currently_expanding_gimple_stmt,
3398 &expected_align, &expected_size);
3400 if (expected_align < dest_align)
3401 expected_align = dest_align;
3402 dest_mem = get_memory_rtx (dest, len);
3403 set_mem_align (dest_mem, dest_align);
3404 len_rtx = expand_normal (len);
3405 determine_block_size (len, len_rtx, &min_size, &max_size,
3406 &probable_max_size);
3408 /* Try to get the byte representation of the constant SRC points to,
3409 with its byte size in NBYTES. */
3410 unsigned HOST_WIDE_INT nbytes;
3411 const char *rep = getbyterep (src, &nbytes);
3413 /* If the function's constant bound LEN_RTX is less than or equal
3414 to the byte size of the representation of the constant argument,
3415 and if block move would be done by pieces, we can avoid loading
3416 the bytes from memory and only store the computed constant.
3417 This works in the overlap (memmove) case as well because
3418 store_by_pieces just generates a series of stores of constants
3419 from the representation returned by getbyterep(). */
3420 if (rep
3421 && CONST_INT_P (len_rtx)
3422 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3423 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3424 CONST_CAST (char *, rep),
3425 dest_align, false))
3427 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3428 builtin_memcpy_read_str,
3429 CONST_CAST (char *, rep),
3430 dest_align, false, retmode);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3436 src_mem = get_memory_rtx (src, len);
3437 set_mem_align (src_mem, src_align);
3439 /* Copy word part most expediently. */
3440 enum block_op_methods method = BLOCK_OP_NORMAL;
3441 if (CALL_EXPR_TAILCALL (exp)
3442 && (retmode == RETURN_BEGIN || target == const0_rtx))
3443 method = BLOCK_OP_TAILCALL;
3444 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3445 && retmode == RETURN_END
3446 && !might_overlap
3447 && target != const0_rtx);
3448 if (use_mempcpy_call)
3449 method = BLOCK_OP_NO_LIBCALL_RET;
3450 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3451 expected_align, expected_size,
3452 min_size, max_size, probable_max_size,
3453 use_mempcpy_call, &is_move_done,
3454 might_overlap);
3456 /* Bail out when a mempcpy call would be expanded as a libcall and
3457 the target provides a fast implementation of the mempcpy
3458 routine. */
3459 if (!is_move_done)
3460 return NULL_RTX;
3462 if (dest_addr == pc_rtx)
3463 return NULL_RTX;
3465 if (dest_addr == 0)
3467 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3468 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3471 if (retmode != RETURN_BEGIN && target != const0_rtx)
3473 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3474 /* stpcpy returns a pointer to the last byte written, i.e. the NUL. */
3475 if (retmode == RETURN_END_MINUS_ONE)
3476 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3479 return dest_addr;
3482 static rtx
3483 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3484 rtx target, tree orig_exp, memop_ret retmode)
3486 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3487 retmode, false);
3490 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3491 we failed, in which case the caller should emit a normal call; otherwise try to
3492 get the result in TARGET, if convenient.
3493 Return value is based on RETMODE argument. */
3495 static rtx
3496 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3498 class expand_operand ops[3];
3499 rtx dest_mem;
3500 rtx src_mem;
3502 if (!targetm.have_movstr ())
3503 return NULL_RTX;
3505 dest_mem = get_memory_rtx (dest, NULL);
3506 src_mem = get_memory_rtx (src, NULL);
3507 if (retmode == RETURN_BEGIN)
3509 target = force_reg (Pmode, XEXP (dest_mem, 0));
3510 dest_mem = replace_equiv_address (dest_mem, target);
3513 create_output_operand (&ops[0],
3514 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3515 create_fixed_operand (&ops[1], dest_mem);
3516 create_fixed_operand (&ops[2], src_mem);
3517 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3518 return NULL_RTX;
3520 if (retmode != RETURN_BEGIN && target != const0_rtx)
3522 target = ops[0].value;
3523 /* movstr is supposed to set end to the address of the NUL
3524 terminator. If the caller requested a mempcpy-like return value,
3525 adjust it. */
3526 if (retmode == RETURN_END)
3528 rtx tem = plus_constant (GET_MODE (target),
3529 gen_lowpart (GET_MODE (target), target), 1);
3530 emit_move_insn (target, force_operand (tem, NULL_RTX));
3533 return target;
3536 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3537 NULL_RTX if we failed; the caller should then emit a normal call.
3538 Otherwise try to get the result in TARGET, if convenient. */
3541 static rtx
3542 expand_builtin_strcpy (tree exp, rtx target)
3544 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3545 return NULL_RTX;
3547 tree dest = CALL_EXPR_ARG (exp, 0);
3548 tree src = CALL_EXPR_ARG (exp, 1);
3550 return expand_builtin_strcpy_args (exp, dest, src, target);
3553 /* Helper function to do the actual work for expand_builtin_strcpy. The
3554 arguments to the builtin_strcpy call DEST and SRC are broken out
3555 so that this can also be called without constructing an actual CALL_EXPR.
3556 The other arguments and return value are the same as for
3557 expand_builtin_strcpy. */
3559 static rtx
3560 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3562 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3565 /* Expand a call EXP to the stpcpy builtin.
3566 Return NULL_RTX if we failed; the caller should then emit a normal call.
3567 Otherwise try to get the result in TARGET, if convenient (and in
3568 mode MODE if that's convenient). */
3570 static rtx
3571 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3573 tree dst, src;
3574 location_t loc = EXPR_LOCATION (exp);
3576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3577 return NULL_RTX;
3579 dst = CALL_EXPR_ARG (exp, 0);
3580 src = CALL_EXPR_ARG (exp, 1);
3582 /* If return value is ignored, transform stpcpy into strcpy. */
3583 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3585 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3586 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3587 return expand_expr (result, target, mode, EXPAND_NORMAL);
3589 else
3591 tree len, lenp1;
3592 rtx ret;
3594 /* Ensure we get an actual string whose length can be evaluated at
3595 compile-time, not an expression containing a string. This is
3596 because the latter will potentially produce pessimized code
3597 when used to produce the return value. */
3598 c_strlen_data lendata = { };
3599 if (!c_getstr (src)
3600 || !(len = c_strlen (src, 0, &lendata, 1)))
3601 return expand_movstr (dst, src, target,
3602 /*retmode=*/ RETURN_END_MINUS_ONE);
3604 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3605 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3606 target, exp,
3607 /*retmode=*/ RETURN_END_MINUS_ONE);
3609 if (ret)
3610 return ret;
3612 if (TREE_CODE (len) == INTEGER_CST)
3614 rtx len_rtx = expand_normal (len);
3616 if (CONST_INT_P (len_rtx))
3618 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3620 if (ret)
3622 if (! target)
3624 if (mode != VOIDmode)
3625 target = gen_reg_rtx (mode);
3626 else
3627 target = gen_reg_rtx (GET_MODE (ret));
3629 if (GET_MODE (target) != GET_MODE (ret))
3630 ret = gen_lowpart (GET_MODE (target), ret);
3632 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3633 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3634 gcc_assert (ret);
3636 return target;
3641 return expand_movstr (dst, src, target,
3642 /*retmode=*/ RETURN_END_MINUS_ONE);
3646 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3647 arguments while being careful to avoid duplicate warnings (which could
3648 be issued if the expander were to expand the call, resulting in it
3649 being emitted in expand_call()). */
3651 static rtx
3652 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3654 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3656 /* The call has been successfully expanded. Check for nonstring
3657 arguments and issue warnings as appropriate. */
3658 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3659 return ret;
3662 return NULL_RTX;
3665 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3666 bytes from constant string DATA + OFFSET and return it as target
3667 constant. */
3670 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3671 fixed_size_mode mode)
3673 const char *str = (const char *) data;
3675 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3676 return const0_rtx;
3678 /* The by-pieces infrastructure does not try to pick a vector mode
3679 for strncpy expansion. */
3680 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3683 /* Helper to check the sizes of sequences and the destination of calls
3684 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3685 success (no overflow or invalid sizes), false otherwise. */
3687 static bool
3688 check_strncat_sizes (tree exp, tree objsize)
3690 tree dest = CALL_EXPR_ARG (exp, 0);
3691 tree src = CALL_EXPR_ARG (exp, 1);
3692 tree maxread = CALL_EXPR_ARG (exp, 2);
3694 /* Try to determine the range of lengths that the source expression
3695 refers to. */
3696 c_strlen_data lendata = { };
3697 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3699 /* Try to verify that the destination is big enough for the shortest
3700 string. */
3702 access_data data (nullptr, exp, access_read_write, maxread, true);
3703 if (!objsize && warn_stringop_overflow)
3705 /* If it hasn't been provided by __strncat_chk, try to determine
3706 the size of the destination object into which the source is
3707 being copied. */
3708 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3711 /* Add one for the terminating nul. */
3712 tree srclen = (lendata.minlen
3713 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3714 size_one_node)
3715 : NULL_TREE);
3717 /* The strncat function copies at most MAXREAD bytes and always appends
3718 the terminating nul so the specified upper bound should never be equal
3719 to (or greater than) the size of the destination. */
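/* A typical instance of the misuse diagnosed just below (illustrative):

     char d[8];
     strncat (d, s, sizeof d);   bound equals the destination size

   a safe bound here would be sizeof d - strlen (d) - 1.  */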
3720 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3721 && tree_int_cst_equal (objsize, maxread))
3723 location_t loc = EXPR_LOCATION (exp);
3724 warning_at (loc, OPT_Wstringop_overflow_,
3725 "%qD specified bound %E equals destination size",
3726 get_callee_fndecl (exp), maxread);
3728 return false;
3731 if (!srclen
3732 || (maxread && tree_fits_uhwi_p (maxread)
3733 && tree_fits_uhwi_p (srclen)
3734 && tree_int_cst_lt (maxread, srclen)))
3735 srclen = maxread;
3737 /* The number of bytes to write is LEN but check_access will also
3738 check SRCLEN if LEN's value isn't known. */
3739 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3740 objsize, data.mode, &data);
3743 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3744 NULL_RTX if we failed; the caller should then emit a normal call. */
3746 static rtx
3747 expand_builtin_strncpy (tree exp, rtx target)
3749 location_t loc = EXPR_LOCATION (exp);
3751 if (!validate_arglist (exp,
3752 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3753 return NULL_RTX;
3754 tree dest = CALL_EXPR_ARG (exp, 0);
3755 tree src = CALL_EXPR_ARG (exp, 1);
3756 /* The number of bytes to write (not the maximum). */
3757 tree len = CALL_EXPR_ARG (exp, 2);
3759 /* The length of the source sequence. */
3760 tree slen = c_strlen (src, 1);
3762 /* We must be passed a constant len and src parameter. */
3763 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3764 return NULL_RTX;
3766 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3768 /* We're required to pad with trailing zeros if the requested
3769 len is greater than strlen(s2)+1. In that case try to
3770 use store_by_pieces; if that fails, punt. */
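/* Illustrative case: strncpy (d, "ab", 8) must store 'a', 'b' and then
   six NUL bytes; the store_by_pieces path below emits all 8 bytes
   directly, with builtin_strncpy_read_str supplying zeros past the
   end of the source string.  */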
3771 if (tree_int_cst_lt (slen, len))
3773 unsigned int dest_align = get_pointer_alignment (dest);
3774 const char *p = c_getstr (src);
3775 rtx dest_mem;
3777 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3778 || !can_store_by_pieces (tree_to_uhwi (len),
3779 builtin_strncpy_read_str,
3780 CONST_CAST (char *, p),
3781 dest_align, false))
3782 return NULL_RTX;
3784 dest_mem = get_memory_rtx (dest, len);
3785 store_by_pieces (dest_mem, tree_to_uhwi (len),
3786 builtin_strncpy_read_str,
3787 CONST_CAST (char *, p), dest_align, false,
3788 RETURN_BEGIN);
3789 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3790 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3791 return dest_mem;
3794 return NULL_RTX;
3797 /* Return the RTL of a register in MODE generated from PREV in the
3798 previous iteration. */
3800 static rtx
3801 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3803 rtx target = nullptr;
3804 if (prev != nullptr && prev->data != nullptr)
3806 /* Use the previous data in the same mode. */
3807 if (prev->mode == mode)
3808 return prev->data;
3810 fixed_size_mode prev_mode = prev->mode;
3812 /* Don't use the previous data to write QImode if it is in a
3813 vector mode. */
3814 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3815 return target;
3817 rtx prev_rtx = prev->data;
3819 if (REG_P (prev_rtx)
3820 && HARD_REGISTER_P (prev_rtx)
3821 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3823 /* This case occurs when PREV_MODE is a vector and when
3824 MODE is too small to store using vector operations.
3825 After register allocation, the code will need to move the
3826 lowpart of the vector register into a non-vector register.
3828 Also, the target has chosen to use a hard register
3829 instead of going with the default choice of using a
3830 pseudo register. We should respect that choice and try to
3831 avoid creating a pseudo register with the same mode as the
3832 current hard register.
3834 In principle, we could just use a lowpart MODE subreg of
3835 the vector register. However, the vector register mode might
3836 be too wide for non-vector registers, and we already know
3837 that the non-vector mode is too small for vector registers.
3838 It's therefore likely that we'd need to spill to memory in
3839 the vector mode and reload the non-vector value from there.
3841 Try to avoid that by reducing the vector register to the
3842 smallest size that it can hold. This should increase the
3843 chances that non-vector registers can hold both the inner
3844 and outer modes of the subreg that we generate later. */
3845 machine_mode m;
3846 fixed_size_mode candidate;
3847 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3848 if (is_a<fixed_size_mode> (m, &candidate))
3850 if (GET_MODE_SIZE (candidate)
3851 >= GET_MODE_SIZE (prev_mode))
3852 break;
3853 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3854 && lowpart_subreg_regno (REGNO (prev_rtx),
3855 prev_mode, candidate) >= 0)
3857 target = lowpart_subreg (candidate, prev_rtx,
3858 prev_mode);
3859 prev_rtx = target;
3860 prev_mode = candidate;
3861 break;
3864 if (target == nullptr)
3865 prev_rtx = copy_to_reg (prev_rtx);
3868 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3870 return target;
3873 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3874 bytes from constant string DATA + OFFSET and return it as target
3875 constant. If PREV isn't nullptr, it has the RTL info from the
3876 previous iteration. */
3879 builtin_memset_read_str (void *data, void *prev,
3880 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3881 fixed_size_mode mode)
3883 const char *c = (const char *) data;
3884 unsigned int size = GET_MODE_SIZE (mode);
3886 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3887 mode);
3888 if (target != nullptr)
3889 return target;
3890 rtx src = gen_int_mode (*c, QImode);
3892 if (VECTOR_MODE_P (mode))
3894 gcc_assert (GET_MODE_INNER (mode) == QImode);
3896 rtx const_vec = gen_const_vec_duplicate (mode, src);
3897 if (prev == NULL)
3898 /* Return CONST_VECTOR when called by a query function. */
3899 return const_vec;
3901 /* Use the move expander with CONST_VECTOR. */
3902 target = targetm.gen_memset_scratch_rtx (mode);
3903 emit_move_insn (target, const_vec);
3904 return target;
3907 char *p = XALLOCAVEC (char, size);
3909 memset (p, *c, size);
3911 /* Vector modes should be handled above. */
3912 return c_readstr (p, as_a <scalar_int_mode> (mode));
3915 /* Callback routine for store_by_pieces. Return the RTL of a register
3916 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3917 char value given in the RTL register data. For example, if mode is
3918 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3919 nullptr, it has the RTL info from the previous iteration. */
3921 static rtx
3922 builtin_memset_gen_str (void *data, void *prev,
3923 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3924 fixed_size_mode mode)
3926 rtx target, coeff;
3927 size_t size;
3928 char *p;
3930 size = GET_MODE_SIZE (mode);
3931 if (size == 1)
3932 return (rtx) data;
3934 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3935 if (target != nullptr)
3936 return target;
3938 if (VECTOR_MODE_P (mode))
3940 gcc_assert (GET_MODE_INNER (mode) == QImode);
3942 /* Support for vec_duplicate_optab is a precondition for picking a
3943 vector mode in the memset expander. */
3944 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3946 target = targetm.gen_memset_scratch_rtx (mode);
3947 class expand_operand ops[2];
3948 create_output_operand (&ops[0], target, mode);
3949 create_input_operand (&ops[1], (rtx) data, QImode);
3950 expand_insn (icode, 2, ops);
3951 if (!rtx_equal_p (target, ops[0].value))
3952 emit_move_insn (target, ops[0].value);
3954 return target;
3957 p = XALLOCAVEC (char, size);
3958 memset (p, 1, size);
3959 /* Vector modes should be handled above. */
3960 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3962 target = convert_to_mode (mode, (rtx) data, 1);
3963 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3964 return force_reg (mode, target);
3967 /* Expand expression EXP, which is a call to the memset builtin. Return
3968 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3969 try to get the result in TARGET, if convenient (and in mode MODE if that's
3970 convenient). */
3972 static rtx
3973 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3975 if (!validate_arglist (exp,
3976 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3977 return NULL_RTX;
3979 tree dest = CALL_EXPR_ARG (exp, 0);
3980 tree val = CALL_EXPR_ARG (exp, 1);
3981 tree len = CALL_EXPR_ARG (exp, 2);
3983 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3986 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3987 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3988 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3989 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3991 The strategy is to issue one store_by_pieces for each power of two,
3992 from most to least significant, guarded by a test on whether there
3993 are at least that many bytes left to copy in LEN.
3995 ??? Should we skip some powers of two in favor of loops? Maybe start
3996 at the max of TO/LEN/word alignment, at least when optimizing for
3997 size, instead of ensuring O(log len) dynamic compares? */
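/* Rough illustration with hypothetical numbers: for a dynamic LEN known
   to be a multiple of 8 in [8, 120], the expansion amounts to

     if (len >= 64) { store 64 bytes; ptr += 64; len -= 64; }
     if (len >= 32) { store 32 bytes; ptr += 32; len -= 32; }
     if (len >= 16) { store 16 bytes; ptr += 16; len -= 16; }
     if (len >= 8)  { store 8 bytes; }

   i.e. one guarded store_by_pieces per power of two from
   floor_log2 (MAX_LEN) down to CTZ_LEN.  */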
3999 bool
4000 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4001 unsigned HOST_WIDE_INT min_len,
4002 unsigned HOST_WIDE_INT max_len,
4003 rtx val, char valc, unsigned int align)
4005 int max_bits = floor_log2 (max_len);
4006 int min_bits = floor_log2 (min_len);
4007 int sctz_len = ctz_len;
4009 gcc_checking_assert (sctz_len >= 0);
4011 if (val)
4012 valc = 1;
4014 /* Bits more significant than TST_BITS are part of the shared prefix
4015 in the binary representation of both min_len and max_len. Since
4016 they're identical, we don't need to test them in the loop. */
4017 int tst_bits = (max_bits != min_bits ? max_bits
4018 : floor_log2 (max_len ^ min_len));
4020 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4021 bytes, to lower max_bits. In the unlikely case of a constant LEN
4022 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4023 single store_by_pieces, but otherwise, select the minimum multiple
4024 of the ALIGN (in bytes) and of the GCD of the possible LENs that
4025 brings MAX_LEN below 1<<TST_BITS, provided it does not exceed MIN_LEN. */
4026 unsigned HOST_WIDE_INT blksize;
4027 if (max_len > min_len)
4029 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4030 align / BITS_PER_UNIT);
4031 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4032 blksize &= ~(alrng - 1);
4034 else if (max_len == min_len)
4035 blksize = max_len;
4036 else
4037 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4038 return false;
4039 if (min_len >= blksize)
4041 min_len -= blksize;
4042 min_bits = floor_log2 (min_len);
4043 max_len -= blksize;
4044 max_bits = floor_log2 (max_len);
4046 tst_bits = (max_bits != min_bits ? max_bits
4047 : floor_log2 (max_len ^ min_len));
4049 else
4050 blksize = 0;
4052 /* Check that we can use store by pieces for the maximum store count
4053 we may issue (initial fixed-size block, plus conditional
4054 power-of-two-sized blocks from max_bits down to ctz_len). */
4055 unsigned HOST_WIDE_INT xlenest = blksize;
4056 if (max_bits >= 0)
4057 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4058 - (HOST_WIDE_INT_1U << ctz_len));
4059 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4060 &valc, align, true))
4061 return false;
4063 by_pieces_constfn constfun;
4064 void *constfundata;
4065 if (val)
4067 constfun = builtin_memset_gen_str;
4068 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4069 val);
4071 else
4073 constfun = builtin_memset_read_str;
4074 constfundata = &valc;
4077 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4078 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4079 to = replace_equiv_address (to, ptr);
4080 set_mem_align (to, align);
4082 if (blksize)
4084 to = store_by_pieces (to, blksize,
4085 constfun, constfundata,
4086 align, true,
4087 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4088 if (max_len == 0)
4089 return true;
4091 /* Adjust PTR, TO and REM. Since TO's address is likely
4092 PTR+offset, we have to replace it. */
4093 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4094 to = replace_equiv_address (to, ptr);
4095 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4096 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4099 /* Iterate over power-of-two block sizes from the maximum length to
4100 the least significant bit possibly set in the length. */
4101 for (int i = max_bits; i >= sctz_len; i--)
4103 rtx_code_label *label = NULL;
4104 blksize = HOST_WIDE_INT_1U << i;
4106 /* If we're past the bits shared between min_ and max_len, expand
4107 a test on the dynamic length, comparing it with the
4108 BLKSIZE. */
4109 if (i <= tst_bits)
4111 label = gen_label_rtx ();
4112 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4113 ptr_mode, 1, label,
4114 profile_probability::even ());
4116 /* If we are at a bit that is in the prefix shared by min_ and
4117 max_len, skip this BLKSIZE if the bit is clear. */
4118 else if ((max_len & blksize) == 0)
4119 continue;
4121 /* Issue a store of BLKSIZE bytes. */
4122 to = store_by_pieces (to, blksize,
4123 constfun, constfundata,
4124 align, true,
4125 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4127 /* Adjust REM and PTR, unless this is the last iteration. */
4128 if (i != sctz_len)
4130 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4131 to = replace_equiv_address (to, ptr);
4132 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4133 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4136 if (label)
4138 emit_label (label);
4140 /* Given conditional stores, the offset can no longer be
4141 known, so clear it. */
4142 clear_mem_offset (to);
4146 return true;
4149 /* Helper function to do the actual work for expand_builtin_memset. The
4150 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4151 so that this can also be called without constructing an actual CALL_EXPR.
4152 The other arguments and return value are the same as for
4153 expand_builtin_memset. */
4155 static rtx
4156 expand_builtin_memset_args (tree dest, tree val, tree len,
4157 rtx target, machine_mode mode, tree orig_exp)
4159 tree fndecl, fn;
4160 enum built_in_function fcode;
4161 machine_mode val_mode;
4162 char c;
4163 unsigned int dest_align;
4164 rtx dest_mem, dest_addr, len_rtx;
4165 HOST_WIDE_INT expected_size = -1;
4166 unsigned int expected_align = 0;
4167 unsigned HOST_WIDE_INT min_size;
4168 unsigned HOST_WIDE_INT max_size;
4169 unsigned HOST_WIDE_INT probable_max_size;
4171 dest_align = get_pointer_alignment (dest);
4173 /* If DEST is not a pointer type, don't do this operation in-line. */
4174 if (dest_align == 0)
4175 return NULL_RTX;
4177 if (currently_expanding_gimple_stmt)
4178 stringop_block_profile (currently_expanding_gimple_stmt,
4179 &expected_align, &expected_size);
4181 if (expected_align < dest_align)
4182 expected_align = dest_align;
4184 /* If the LEN parameter is zero, return DEST. */
4185 if (integer_zerop (len))
4187 /* Evaluate and ignore VAL in case it has side-effects. */
4188 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4189 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4192 /* Stabilize the arguments in case we fail. */
4193 dest = builtin_save_expr (dest);
4194 val = builtin_save_expr (val);
4195 len = builtin_save_expr (len);
4197 len_rtx = expand_normal (len);
4198 determine_block_size (len, len_rtx, &min_size, &max_size,
4199 &probable_max_size);
4200 dest_mem = get_memory_rtx (dest, len);
4201 val_mode = TYPE_MODE (unsigned_char_type_node);
4203 if (TREE_CODE (val) != INTEGER_CST
4204 || target_char_cast (val, &c))
4206 rtx val_rtx;
4208 val_rtx = expand_normal (val);
4209 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4211 /* Assume that we can memset by pieces if we can store
4212 the coefficients by pieces (in the required modes).
4213 We can't pass builtin_memset_gen_str as that emits RTL. */
4214 c = 1;
4215 if (tree_fits_uhwi_p (len)
4216 && can_store_by_pieces (tree_to_uhwi (len),
4217 builtin_memset_read_str, &c, dest_align,
4218 true))
4220 val_rtx = force_reg (val_mode, val_rtx);
4221 store_by_pieces (dest_mem, tree_to_uhwi (len),
4222 builtin_memset_gen_str, val_rtx, dest_align,
4223 true, RETURN_BEGIN);
4225 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4226 dest_align, expected_align,
4227 expected_size, min_size, max_size,
4228 probable_max_size)
4229 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4230 tree_ctz (len),
4231 min_size, max_size,
4232 val_rtx, 0,
4233 dest_align))
4234 goto do_libcall;
4236 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4237 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4238 return dest_mem;
4241 if (c)
4243 if (tree_fits_uhwi_p (len)
4244 && can_store_by_pieces (tree_to_uhwi (len),
4245 builtin_memset_read_str, &c, dest_align,
4246 true))
4247 store_by_pieces (dest_mem, tree_to_uhwi (len),
4248 builtin_memset_read_str, &c, dest_align, true,
4249 RETURN_BEGIN);
4250 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4251 gen_int_mode (c, val_mode),
4252 dest_align, expected_align,
4253 expected_size, min_size, max_size,
4254 probable_max_size)
4255 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4256 tree_ctz (len),
4257 min_size, max_size,
4258 NULL_RTX, c,
4259 dest_align))
4260 goto do_libcall;
4262 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4263 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4264 return dest_mem;
4267 set_mem_align (dest_mem, dest_align);
4268 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4269 CALL_EXPR_TAILCALL (orig_exp)
4270 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4271 expected_align, expected_size,
4272 min_size, max_size,
4273 probable_max_size, tree_ctz (len));
4275 if (dest_addr == 0)
4277 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4278 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4281 return dest_addr;
4283 do_libcall:
4284 fndecl = get_callee_fndecl (orig_exp);
4285 fcode = DECL_FUNCTION_CODE (fndecl);
4286 if (fcode == BUILT_IN_MEMSET)
4287 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4288 dest, val, len);
4289 else if (fcode == BUILT_IN_BZERO)
4290 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4291 dest, len);
4292 else
4293 gcc_unreachable ();
4294 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4295 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4296 return expand_call (fn, target, target == const0_rtx);
4299 /* Expand expression EXP, which is a call to the bzero builtin. Return
4300 NULL_RTX if we failed; the caller should then emit a normal call. */
4302 static rtx
4303 expand_builtin_bzero (tree exp)
4305 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4306 return NULL_RTX;
4308 tree dest = CALL_EXPR_ARG (exp, 0);
4309 tree size = CALL_EXPR_ARG (exp, 1);
4311 /* New argument list transforming bzero(ptr x, int y) to
4312 memset(ptr x, int 0, size_t y). This is done this way
4313 so that if it isn't expanded inline, we fall back to
4314 calling bzero instead of memset. */
4316 location_t loc = EXPR_LOCATION (exp);
4318 return expand_builtin_memset_args (dest, integer_zero_node,
4319 fold_convert_loc (loc,
4320 size_type_node, size),
4321 const0_rtx, VOIDmode, exp);
4324 /* Try to expand cmpstr operation ICODE with the given operands.
4325 Return the result rtx on success, otherwise return null. */
4327 static rtx
4328 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4329 HOST_WIDE_INT align)
4331 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4333 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4334 target = NULL_RTX;
4336 class expand_operand ops[4];
4337 create_output_operand (&ops[0], target, insn_mode);
4338 create_fixed_operand (&ops[1], arg1_rtx);
4339 create_fixed_operand (&ops[2], arg2_rtx);
4340 create_integer_operand (&ops[3], align);
4341 if (maybe_expand_insn (icode, 4, ops))
4342 return ops[0].value;
4343 return NULL_RTX;
4346 /* Expand expression EXP, which is a call to the memcmp built-in function.
4347 Return NULL_RTX if we failed and the caller should emit a normal call,
4348 otherwise try to get the result in TARGET, if convenient.
4349 RESULT_EQ is true if we can relax the returned value to be either zero
4350 or nonzero, without caring about the sign. */
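/* RESULT_EQ typically comes from uses such as

     if (memcmp (a, b, n) == 0)
       ...

   where only equality matters, which permits a cheaper block
   comparison than a full three-way ordering.  */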
4352 static rtx
4353 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4355 if (!validate_arglist (exp,
4356 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4357 return NULL_RTX;
4359 tree arg1 = CALL_EXPR_ARG (exp, 0);
4360 tree arg2 = CALL_EXPR_ARG (exp, 1);
4361 tree len = CALL_EXPR_ARG (exp, 2);
4363 /* Due to the performance benefit, always inline the calls first
4364 when result_eq is false. */
4365 rtx result = NULL_RTX;
4366 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4367 if (!result_eq && fcode != BUILT_IN_BCMP)
4369 result = inline_expand_builtin_bytecmp (exp, target);
4370 if (result)
4371 return result;
4374 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4375 location_t loc = EXPR_LOCATION (exp);
4377 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4378 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4380 /* If we don't have POINTER_TYPE, call the function. */
4381 if (arg1_align == 0 || arg2_align == 0)
4382 return NULL_RTX;
4384 rtx arg1_rtx = get_memory_rtx (arg1, len);
4385 rtx arg2_rtx = get_memory_rtx (arg2, len);
4386 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4388 /* Set MEM_SIZE as appropriate. */
4389 if (CONST_INT_P (len_rtx))
4391 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4392 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4395 by_pieces_constfn constfn = NULL;
4397 /* Try to get the byte representation of the constant ARG2 (or, only
4398 when the function's result is used for equality to zero, ARG1)
4399 points to, with its byte size in NBYTES. */
4400 unsigned HOST_WIDE_INT nbytes;
4401 const char *rep = getbyterep (arg2, &nbytes);
4402 if (result_eq && rep == NULL)
4404 /* For equality to zero the arguments are interchangeable. */
4405 rep = getbyterep (arg1, &nbytes);
4406 if (rep != NULL)
4407 std::swap (arg1_rtx, arg2_rtx);
4410 /* If the function's constant bound LEN_RTX is less than or equal
4411 to the byte size of the representation of the constant argument,
4412 and if block move would be done by pieces, we can avoid loading
4413 the bytes from memory and only store the computed constant result. */
4414 if (rep
4415 && CONST_INT_P (len_rtx)
4416 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4417 constfn = builtin_memcpy_read_str;
4419 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4420 TREE_TYPE (len), target,
4421 result_eq, constfn,
4422 CONST_CAST (char *, rep));
4424 if (result)
4426 /* Return the value in the proper mode for this function. */
4427 if (GET_MODE (result) == mode)
4428 return result;
4430 if (target != 0)
4432 convert_move (target, result, 0);
4433 return target;
4436 return convert_to_mode (mode, result, 0);
4439 return NULL_RTX;
4442 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4443 if we failed; the caller should then emit a normal call. Otherwise try to get
4444 the result in TARGET, if convenient. */
4446 static rtx
4447 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4449 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4450 return NULL_RTX;
4452 tree arg1 = CALL_EXPR_ARG (exp, 0);
4453 tree arg2 = CALL_EXPR_ARG (exp, 1);
4455 /* Due to the performance benefit, always inline the calls first. */
4456 rtx result = NULL_RTX;
4457 result = inline_expand_builtin_bytecmp (exp, target);
4458 if (result)
4459 return result;
4461 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4462 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4463 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4464 return NULL_RTX;
4466 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4467 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4469 /* If we don't have POINTER_TYPE, call the function. */
4470 if (arg1_align == 0 || arg2_align == 0)
4471 return NULL_RTX;
4473 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4474 arg1 = builtin_save_expr (arg1);
4475 arg2 = builtin_save_expr (arg2);
4477 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4478 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4480 /* Try to call cmpstrsi. */
4481 if (cmpstr_icode != CODE_FOR_nothing)
4482 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4483 MIN (arg1_align, arg2_align));
4485 /* Try to determine at least one length and call cmpstrnsi. */
4486 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4488 tree len;
4489 rtx arg3_rtx;
4491 tree len1 = c_strlen (arg1, 1);
4492 tree len2 = c_strlen (arg2, 1);
4494 if (len1)
4495 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4496 if (len2)
4497 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4499 /* If we don't have a constant length for the first, use the length
4500 of the second, if we know it. We don't require a constant for
4501 this case; some cost analysis could be done if both are available
4502 but neither is constant. For now, assume they're equally cheap,
4503 unless one has side effects. If both strings have constant lengths,
4504 use the smaller. */
4506 if (!len1)
4507 len = len2;
4508 else if (!len2)
4509 len = len1;
4510 else if (TREE_SIDE_EFFECTS (len1))
4511 len = len2;
4512 else if (TREE_SIDE_EFFECTS (len2))
4513 len = len1;
4514 else if (TREE_CODE (len1) != INTEGER_CST)
4515 len = len2;
4516 else if (TREE_CODE (len2) != INTEGER_CST)
4517 len = len1;
4518 else if (tree_int_cst_lt (len1, len2))
4519 len = len1;
4520 else
4521 len = len2;
4523 /* If both arguments have side effects, we cannot optimize. */
4524 if (len && !TREE_SIDE_EFFECTS (len))
4526 arg3_rtx = expand_normal (len);
4527 result = expand_cmpstrn_or_cmpmem
4528 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4529 arg3_rtx, MIN (arg1_align, arg2_align));
4533 tree fndecl = get_callee_fndecl (exp);
4534 if (result)
4536 /* Return the value in the proper mode for this function. */
4537 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4538 if (GET_MODE (result) == mode)
4539 return result;
4540 if (target == 0)
4541 return convert_to_mode (mode, result, 0);
4542 convert_move (target, result, 0);
4543 return target;
4546 /* Expand the library call ourselves using a stabilized argument
4547 list to avoid re-evaluating the function's arguments twice. */
4548 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4549 copy_warning (fn, exp);
4550 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4551 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4552 return expand_call (fn, target, target == const0_rtx);
4555 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4556 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
4557 try to get the result in TARGET, if convenient. */
4559 static rtx
4560 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4561 ATTRIBUTE_UNUSED machine_mode mode)
4563 if (!validate_arglist (exp,
4564 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4565 return NULL_RTX;
4567 tree arg1 = CALL_EXPR_ARG (exp, 0);
4568 tree arg2 = CALL_EXPR_ARG (exp, 1);
4569 tree arg3 = CALL_EXPR_ARG (exp, 2);
4571 location_t loc = EXPR_LOCATION (exp);
4572 tree len1 = c_strlen (arg1, 1);
4573 tree len2 = c_strlen (arg2, 1);
4575 /* Due to the performance benefit, always inline the calls first. */
4576 rtx result = NULL_RTX;
4577 result = inline_expand_builtin_bytecmp (exp, target);
4578 if (result)
4579 return result;
4581 /* If c_strlen can determine an expression for one of the string
4582 lengths, and it doesn't have side effects, then emit cmpstrnsi
4583 using length MIN(strlen(string)+1, arg3). */
4584 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4585 if (cmpstrn_icode == CODE_FOR_nothing)
4586 return NULL_RTX;
4588 tree len;
4590 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4591 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4593 if (len1)
4594 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4595 if (len2)
4596 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4598 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4600 /* If we don't have a constant length for the first, use the length
4601 of the second, if we know it. If neither string is constant length,
4602 use the given length argument. We don't require a constant for
4603 this case; some cost analysis could be done if both are available
4604 but neither is constant. For now, assume they're equally cheap,
4605 unless one has side effects. If both strings have constant lengths,
4606 use the smaller. */
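/* Illustrative case: for strncmp (s, "ab", n) only LEN2 is known;
   LEN2 == 3 counting the NUL, so the code below compares
   MIN (3, n) bytes via cmpstrnsi.  */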
4608 if (!len1 && !len2)
4609 len = len3;
4610 else if (!len1)
4611 len = len2;
4612 else if (!len2)
4613 len = len1;
4614 else if (TREE_SIDE_EFFECTS (len1))
4615 len = len2;
4616 else if (TREE_SIDE_EFFECTS (len2))
4617 len = len1;
4618 else if (TREE_CODE (len1) != INTEGER_CST)
4619 len = len2;
4620 else if (TREE_CODE (len2) != INTEGER_CST)
4621 len = len1;
4622 else if (tree_int_cst_lt (len1, len2))
4623 len = len1;
4624 else
4625 len = len2;
4627 /* If we are not using the given length, we must incorporate it here.
4628 The actual new length parameter will be MIN(len,arg3) in this case. */
4629 if (len != len3)
4631 len = fold_convert_loc (loc, sizetype, len);
4632 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4634 rtx arg1_rtx = get_memory_rtx (arg1, len);
4635 rtx arg2_rtx = get_memory_rtx (arg2, len);
4636 rtx arg3_rtx = expand_normal (len);
4637 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4638 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4639 MIN (arg1_align, arg2_align));
4641 tree fndecl = get_callee_fndecl (exp);
4642 if (result)
4644 /* Return the value in the proper mode for this function. */
4645 mode = TYPE_MODE (TREE_TYPE (exp));
4646 if (GET_MODE (result) == mode)
4647 return result;
4648 if (target == 0)
4649 return convert_to_mode (mode, result, 0);
4650 convert_move (target, result, 0);
4651 return target;
4654 /* Expand the library call ourselves using a stabilized argument
4655 list to avoid re-evaluating the function's arguments twice. */
4656 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4657 copy_warning (call, exp);
4658 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4659 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4660 return expand_call (call, target, target == const0_rtx);
4663 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4664 if that's convenient. */
4666 rtx
4667 expand_builtin_saveregs (void)
4669 rtx val;
4670 rtx_insn *seq;
4672 /* Don't do __builtin_saveregs more than once in a function.
4673 Save the result of the first call and reuse it. */
4674 if (saveregs_value != 0)
4675 return saveregs_value;
4677 /* When this function is called, it means that registers must be
4678 saved on entry to this function. So we migrate the call to the
4679 first insn of this function. */
4681 start_sequence ();
4683 /* Do whatever the machine needs done in this case. */
4684 val = targetm.calls.expand_builtin_saveregs ();
4686 seq = get_insns ();
4687 end_sequence ();
4689 saveregs_value = val;
4691 /* Put the insns after the NOTE that starts the function. If this
4692 is inside a start_sequence, make the outer-level insn chain current, so
4693 the code is placed at the start of the function. */
4694 push_topmost_sequence ();
4695 emit_insn_after (seq, entry_of_function ());
4696 pop_topmost_sequence ();
4698 return val;
4701 /* Expand a call to __builtin_next_arg. */
4703 static rtx
4704 expand_builtin_next_arg (void)
4706 /* Checking arguments is already done in fold_builtin_next_arg
4707 that must be called before this function. */
4708 return expand_binop (ptr_mode, add_optab,
4709 crtl->args.internal_arg_pointer,
4710 crtl->args.arg_offset_rtx,
4711 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4714 /* Make it easier for the backends by protecting the valist argument
4715 from multiple evaluations. */
4717 static tree
4718 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4720 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4722 /* The current way of determining the type of valist is completely
4723 bogus. We should have the information on the va builtin instead. */
4724 if (!vatype)
4725 vatype = targetm.fn_abi_va_list (cfun->decl);
4727 if (TREE_CODE (vatype) == ARRAY_TYPE)
4729 if (TREE_SIDE_EFFECTS (valist))
4730 valist = save_expr (valist);
4732 /* For this case, the backends will be expecting a pointer to
4733 vatype, but it's possible we've actually been given an array
4734 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4735 So fix it. */
4736 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4738 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4739 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4742 else
4744 tree pt = build_pointer_type (vatype);
4746 if (! needs_lvalue)
4748 if (! TREE_SIDE_EFFECTS (valist))
4749 return valist;
4751 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4752 TREE_SIDE_EFFECTS (valist) = 1;
4755 if (TREE_SIDE_EFFECTS (valist))
4756 valist = save_expr (valist);
4757 valist = fold_build2_loc (loc, MEM_REF,
4758 vatype, valist, build_int_cst (pt, 0));
4761 return valist;
4764 /* The "standard" definition of va_list is void*. */
4766 tree
4767 std_build_builtin_va_list (void)
4769 return ptr_type_node;
4772 /* The "standard" abi va_list is va_list_type_node. */
4774 tree
4775 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4777 return va_list_type_node;
4780 /* The "standard" type of va_list is va_list_type_node. */
4782 tree
4783 std_canonical_va_list_type (tree type)
4785 tree wtype, htype;
4787 wtype = va_list_type_node;
4788 htype = type;
4790 if (TREE_CODE (wtype) == ARRAY_TYPE)
4792 /* If va_list is an array type, the argument may have decayed
4793 to a pointer type, e.g. by being passed to another function.
4794 In that case, unwrap both types so that we can compare the
4795 underlying records. */
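/* For instance, on x86_64 the ABI va_list is struct __va_list_tag[1];
   a va_list received as a parameter has decayed to
   struct __va_list_tag *, so both are unwrapped to the underlying
   record before the comparison (target-specific illustration).  */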
4796 if (TREE_CODE (htype) == ARRAY_TYPE
4797 || POINTER_TYPE_P (htype))
4799 wtype = TREE_TYPE (wtype);
4800 htype = TREE_TYPE (htype);
4803 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4804 return va_list_type_node;
4806 return NULL_TREE;
4809 /* The "standard" implementation of va_start: just assign `nextarg' to
4810 the variable. */
4812 void
4813 std_expand_builtin_va_start (tree valist, rtx nextarg)
4815 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4816 convert_move (va_r, nextarg, 0);
4819 /* Expand EXP, a call to __builtin_va_start. */
4821 static rtx
4822 expand_builtin_va_start (tree exp)
4824 rtx nextarg;
4825 tree valist;
4826 location_t loc = EXPR_LOCATION (exp);
4828 if (call_expr_nargs (exp) < 2)
4830 error_at (loc, "too few arguments to function %<va_start%>");
4831 return const0_rtx;
4834 if (fold_builtin_next_arg (exp, true))
4835 return const0_rtx;
4837 nextarg = expand_builtin_next_arg ();
4838 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4840 if (targetm.expand_builtin_va_start)
4841 targetm.expand_builtin_va_start (valist, nextarg);
4842 else
4843 std_expand_builtin_va_start (valist, nextarg);
4845 return const0_rtx;
4848 /* Expand EXP, a call to __builtin_va_end. */
4850 static rtx
4851 expand_builtin_va_end (tree exp)
4853 tree valist = CALL_EXPR_ARG (exp, 0);
4855 /* Evaluate for side effects, if needed. I hate macros that don't
4856 do that. */
4857 if (TREE_SIDE_EFFECTS (valist))
4858 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4860 return const0_rtx;
4863 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4864 builtin rather than just as an assignment in stdarg.h because of the
4865 nastiness of array-type va_list types. */
4867 static rtx
4868 expand_builtin_va_copy (tree exp)
4870 tree dst, src, t;
4871 location_t loc = EXPR_LOCATION (exp);
4873 dst = CALL_EXPR_ARG (exp, 0);
4874 src = CALL_EXPR_ARG (exp, 1);
4876 dst = stabilize_va_list_loc (loc, dst, 1);
4877 src = stabilize_va_list_loc (loc, src, 0);
4879 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4881 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4883 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4884 TREE_SIDE_EFFECTS (t) = 1;
4885 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4887 else
4889 rtx dstb, srcb, size;
4891 /* Evaluate to pointers. */
4892 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4893 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4894 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4895 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4897 dstb = convert_memory_address (Pmode, dstb);
4898 srcb = convert_memory_address (Pmode, srcb);
4900 /* "Dereference" to BLKmode memories. */
4901 dstb = gen_rtx_MEM (BLKmode, dstb);
4902 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4903 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4904 srcb = gen_rtx_MEM (BLKmode, srcb);
4905 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4906 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4908 /* Copy. */
4909 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4912 return const0_rtx;
4915 /* Expand a call to one of the builtin functions __builtin_frame_address or
4916 __builtin_return_address. */
4918 static rtx
4919 expand_builtin_frame_address (tree fndecl, tree exp)
4921 /* The argument must be a nonnegative integer constant.
4922 It counts the number of frames to scan up the stack.
4923 The value is either the frame pointer value or the return
4924 address saved in that frame. */
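/* Typical uses (illustrative):

     void *fp = __builtin_frame_address (0);    current function's frame
     void *ra = __builtin_return_address (0);   current function's return address

   A nonzero count walks further up the stack and is diagnosed below as
   potentially unsafe.  */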
4925 if (call_expr_nargs (exp) == 0)
4926 /* Warning about missing arg was already issued. */
4927 return const0_rtx;
4928 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4930 error ("invalid argument to %qD", fndecl);
4931 return const0_rtx;
4933 else
4935 /* Number of frames to scan up the stack. */
4936 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4938 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4940 /* Some ports cannot access arbitrary stack frames. */
4941 if (tem == NULL)
4943 warning (0, "unsupported argument to %qD", fndecl);
4944 return const0_rtx;
4947 if (count)
4949 /* Warn since no effort is made to ensure that any frame
4950 beyond the current one exists or can be safely reached. */
4951 warning (OPT_Wframe_address, "calling %qD with "
4952 "a nonzero argument is unsafe", fndecl);
4955 /* For __builtin_frame_address, return what we've got. */
4956 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4957 return tem;
4959 if (!REG_P (tem)
4960 && ! CONSTANT_P (tem))
4961 tem = copy_addr_to_reg (tem);
4962 return tem;
4966 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4967 failed and the caller should emit a normal call. */
4969 static rtx
4970 expand_builtin_alloca (tree exp)
4972 rtx op0;
4973 rtx result;
4974 unsigned int align;
4975 tree fndecl = get_callee_fndecl (exp);
4976 HOST_WIDE_INT max_size;
4977 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4978 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4979 bool valid_arglist
4980 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4981 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4982 VOID_TYPE)
4983 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4984 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4985 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4987 if (!valid_arglist)
4988 return NULL_RTX;
4990 /* Compute the argument. */
4991 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4993 /* Compute the alignment. */
4994 align = (fcode == BUILT_IN_ALLOCA
4995 ? BIGGEST_ALIGNMENT
4996 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4998 /* Compute the maximum size. */
4999 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5000 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5001 : -1);
5003 /* Allocate the desired space. If the allocation stems from the declaration
5004 of a variable-sized object, it cannot accumulate. */
5005 result
5006 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5007 result = convert_memory_address (ptr_mode, result);
5009 /* Dynamic allocations for variables are recorded during gimplification. */
5010 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5011 record_dynamic_alloc (exp);
5013 return result;
5016 /* Emit a call to __asan_allocas_unpoison for the call EXP. Add to the
5017 second argument of that call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5018 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this in
5019 the comment on the handle_builtin_stack_restore function. */
5021 static rtx
5022 expand_asan_emit_allocas_unpoison (tree exp)
5024 tree arg0 = CALL_EXPR_ARG (exp, 0);
5025 tree arg1 = CALL_EXPR_ARG (exp, 1);
5026 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5027 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5028 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5029 stack_pointer_rtx, NULL_RTX, 0,
5030 OPTAB_LIB_WIDEN);
5031 off = convert_modes (ptr_mode, Pmode, off, 0);
5032 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5033 OPTAB_LIB_WIDEN);
5034 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5035 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5036 top, ptr_mode, bot, ptr_mode);
5037 return ret;
5040 /* Expand a call to bswap builtin in EXP.
5041 Return NULL_RTX if a normal call should be emitted rather than expanding the
5042 function in-line. If convenient, the result should be placed in TARGET.
5043 SUBTARGET may be used as the target for computing one of EXP's operands. */
5045 static rtx
5046 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5047 rtx subtarget)
5049 tree arg;
5050 rtx op0;
5052 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5053 return NULL_RTX;
5055 arg = CALL_EXPR_ARG (exp, 0);
5056 op0 = expand_expr (arg,
5057 subtarget && GET_MODE (subtarget) == target_mode
5058 ? subtarget : NULL_RTX,
5059 target_mode, EXPAND_NORMAL);
5060 if (GET_MODE (op0) != target_mode)
5061 op0 = convert_to_mode (target_mode, op0, 1);
5063 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5065 gcc_assert (target);
5067 return convert_to_mode (target_mode, target, 1);
5070 /* Expand a call to a unary builtin in EXP.
5071 Return NULL_RTX if a normal call should be emitted rather than expanding the
5072 function in-line. If convenient, the result should be placed in TARGET.
5073 SUBTARGET may be used as the target for computing one of EXP's operands. */
5075 static rtx
5076 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5077 rtx subtarget, optab op_optab)
5079 rtx op0;
5081 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5082 return NULL_RTX;
5084 /* Compute the argument. */
5085 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5086 (subtarget
5087 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5088 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5089 VOIDmode, EXPAND_NORMAL);
5090 /* Compute op, into TARGET if possible.
5091 Set TARGET to wherever the result comes back. */
5092 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5093 op_optab, op0, target, op_optab != clrsb_optab);
5094 gcc_assert (target);
5096 return convert_to_mode (target_mode, target, 0);
5099 /* Expand a call to __builtin_expect. We just return our argument
5100 as the builtin_expect semantics should already have been handled by
5101 the tree branch prediction pass. */
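/* In user code this typically looks like

     if (__builtin_expect (ptr == NULL, 0))
       ...   unlikely path

   The hint only influences the earlier branch prediction passes; by the
   time expansion runs it is a no-op and the first argument is simply
   returned.  */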
5103 static rtx
5104 expand_builtin_expect (tree exp, rtx target)
5106 tree arg;
5108 if (call_expr_nargs (exp) < 2)
5109 return const0_rtx;
5110 arg = CALL_EXPR_ARG (exp, 0);
5112 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5113 /* When guessing was done, the hints should be already stripped away. */
5114 gcc_assert (!flag_guess_branch_prob
5115 || optimize == 0 || seen_error ());
5116 return target;
5119 /* Expand a call to __builtin_expect_with_probability. We just return our
5120 argument as the builtin_expect semantics should already have been handled
5121 by the tree branch prediction pass. */
5123 static rtx
5124 expand_builtin_expect_with_probability (tree exp, rtx target)
5126 tree arg;
5128 if (call_expr_nargs (exp) < 3)
5129 return const0_rtx;
5130 arg = CALL_EXPR_ARG (exp, 0);
5132 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5133 /* When guessing was done, the hints should be already stripped away. */
5134 gcc_assert (!flag_guess_branch_prob
5135 || optimize == 0 || seen_error ());
5136 return target;
5140 /* Expand a call to __builtin_assume_aligned. We just return our first
5141 argument, as the builtin_assume_aligned semantics should already have
5142 been handled by CCP. */
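/* In user code, e.g. (illustrative)

     float *p = (float *) __builtin_assume_aligned (buf, 16);

   tells the optimizers that BUF is at least 16-byte aligned; CCP has
   already consumed the hint, so only the first argument matters here.  */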
5144 static rtx
5145 expand_builtin_assume_aligned (tree exp, rtx target)
5147 if (call_expr_nargs (exp) < 2)
5148 return const0_rtx;
5149 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5150 EXPAND_NORMAL);
5151 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5152 && (call_expr_nargs (exp) < 3
5153 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5154 return target;
5157 void
5158 expand_builtin_trap (void)
5160 if (targetm.have_trap ())
5162 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5163 /* For trap insns when not accumulating outgoing args, force a
5164 REG_ARGS_SIZE note to prevent crossjumping of calls with
5165 different args sizes. */
5166 if (!ACCUMULATE_OUTGOING_ARGS)
5167 add_args_size_note (insn, stack_pointer_delta);
5169 else
5171 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5172 tree call_expr = build_call_expr (fn, 0);
5173 expand_call (call_expr, NULL_RTX, false);
5176 emit_barrier ();
5179 /* Expand a call to __builtin_unreachable. We do nothing except emit
5180 a barrier saying that control flow will not pass here.
5182 It is the responsibility of the program being compiled to ensure
5183 that control flow never reaches __builtin_unreachable. */
5184 static void
5185 expand_builtin_unreachable (void)
5187 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5188 to avoid this. */
5189 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5190 emit_barrier ();
5193 /* Expand EXP, a call to fabs, fabsf or fabsl.
5194 Return NULL_RTX if a normal call should be emitted rather than expanding
5195 the function inline. If convenient, the result should be placed
5196 in TARGET. SUBTARGET may be used as the target for computing
5197 the operand. */
5199 static rtx
5200 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5202 machine_mode mode;
5203 tree arg;
5204 rtx op0;
5206 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5207 return NULL_RTX;
5209 arg = CALL_EXPR_ARG (exp, 0);
5210 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5211 mode = TYPE_MODE (TREE_TYPE (arg));
5212 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5213 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5216 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5217 Return NULL if a normal call should be emitted rather than expanding the
5218 function inline. If convenient, the result should be placed in TARGET.
5219 SUBTARGET may be used as the target for computing the operand. */
5221 static rtx
5222 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5224 rtx op0, op1;
5225 tree arg;
5227 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5228 return NULL_RTX;
5230 arg = CALL_EXPR_ARG (exp, 0);
5231 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5233 arg = CALL_EXPR_ARG (exp, 1);
5234 op1 = expand_normal (arg);
5236 return expand_copysign (op0, op1, target);
5239 /* Emit a call to __builtin___clear_cache. */
5241 void
5242 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5244 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5245 BUILTIN_ASM_NAME_PTR
5246 (BUILT_IN_CLEAR_CACHE));
5248 emit_library_call (callee,
5249 LCT_NORMAL, VOIDmode,
5250 convert_memory_address (ptr_mode, begin), ptr_mode,
5251 convert_memory_address (ptr_mode, end), ptr_mode);
5254 /* Emit a call to __builtin___clear_cache, unless the target specifies
5255 it as do-nothing. This function can be used by trampoline
5256 finalizers to duplicate the effects of expanding a call to the
5257 clear_cache builtin. */
5259 void
5260 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5262 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5263 || CONST_INT_P (begin))
5264 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5265 || CONST_INT_P (end)));
5267 if (targetm.have_clear_cache ())
5269 /* We have a "clear_cache" insn, and it will handle everything. */
5270 class expand_operand ops[2];
5272 create_address_operand (&ops[0], begin);
5273 create_address_operand (&ops[1], end);
5275 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5276 return;
5278 else
5280 #ifndef CLEAR_INSN_CACHE
5281 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5282 does nothing. There is no need to call it. Do nothing. */
5283 return;
5284 #endif /* CLEAR_INSN_CACHE */
5287 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5290 /* Expand a call to __builtin___clear_cache. */
5292 static void
5293 expand_builtin___clear_cache (tree exp)
5295 tree begin, end;
5296 rtx begin_rtx, end_rtx;
5298 /* We must not expand to a library call. If we did, any
5299 fallback library function in libgcc that might contain a call to
5300 __builtin___clear_cache() would recurse infinitely. */
5301 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5303 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5304 return;
5307 begin = CALL_EXPR_ARG (exp, 0);
5308 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5310 end = CALL_EXPR_ARG (exp, 1);
5311 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5313 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5316 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5318 static rtx
5319 round_trampoline_addr (rtx tramp)
5321 rtx temp, addend, mask;
5323 /* If we don't need too much alignment, we'll have been guaranteed
5324 proper alignment by get_trampoline_type. */
5325 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5326 return tramp;
5328 /* Round address up to desired boundary. */
5329 temp = gen_reg_rtx (Pmode);
5330 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5331 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5333 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5334 temp, 0, OPTAB_LIB_WIDEN);
5335 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5336 temp, 0, OPTAB_LIB_WIDEN);
5338 return tramp;
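/* Worked example: with a TRAMPOLINE_ALIGNMENT of 64 bits the byte alignment
   is 8, so ADDEND is 7 and MASK is -8; an address of 0x1003 becomes
   (0x1003 + 7) & -8 = 0x1008, the next 8-byte boundary.  */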
5341 static rtx
5342 expand_builtin_init_trampoline (tree exp, bool onstack)
5344 tree t_tramp, t_func, t_chain;
5345 rtx m_tramp, r_tramp, r_chain, tmp;
5347 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5348 POINTER_TYPE, VOID_TYPE))
5349 return NULL_RTX;
5351 t_tramp = CALL_EXPR_ARG (exp, 0);
5352 t_func = CALL_EXPR_ARG (exp, 1);
5353 t_chain = CALL_EXPR_ARG (exp, 2);
5355 r_tramp = expand_normal (t_tramp);
5356 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5357 MEM_NOTRAP_P (m_tramp) = 1;
5359 /* If ONSTACK, the TRAMP argument should be the address of a field
5360 within the local function's FRAME decl. Either way, let's see if
5361 we can fill in the MEM_ATTRs for this memory. */
5362 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5363 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5365 /* Creator of a heap trampoline is responsible for making sure the
5366 address is aligned to at least STACK_BOUNDARY. Normally malloc
5367 will ensure this anyhow. */
5368 tmp = round_trampoline_addr (r_tramp);
5369 if (tmp != r_tramp)
5371 m_tramp = change_address (m_tramp, BLKmode, tmp);
5372 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5373 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5376 /* The FUNC argument should be the address of the nested function.
5377 Extract the actual function decl to pass to the hook. */
5378 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5379 t_func = TREE_OPERAND (t_func, 0);
5380 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5382 r_chain = expand_normal (t_chain);
5384 /* Generate insns to initialize the trampoline. */
5385 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5387 if (onstack)
5389 trampolines_created = 1;
5391 if (targetm.calls.custom_function_descriptors != 0)
5392 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5393 "trampoline generated for nested function %qD", t_func);
5396 return const0_rtx;
5399 static rtx
5400 expand_builtin_adjust_trampoline (tree exp)
5402 rtx tramp;
5404 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5405 return NULL_RTX;
5407 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5408 tramp = round_trampoline_addr (tramp);
5409 if (targetm.calls.trampoline_adjust_address)
5410 tramp = targetm.calls.trampoline_adjust_address (tramp);
5412 return tramp;
5415 /* Expand a call to the builtin descriptor initialization routine.
5416 A descriptor is made up of a couple of pointers to the static
5417 chain and the code entry in this order. */
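/* Illustrative layout only: on a typical 64-bit target the descriptor
   written by this function looks roughly like

     struct descriptor
     {
       void *chain;    (the static chain, stored first)
       void *entry;    (the code entry point, stored second)
     };

   which matches the two emit_move_insn calls below.  */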
5419 static rtx
5420 expand_builtin_init_descriptor (tree exp)
5422 tree t_descr, t_func, t_chain;
5423 rtx m_descr, r_descr, r_func, r_chain;
5425 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5426 VOID_TYPE))
5427 return NULL_RTX;
5429 t_descr = CALL_EXPR_ARG (exp, 0);
5430 t_func = CALL_EXPR_ARG (exp, 1);
5431 t_chain = CALL_EXPR_ARG (exp, 2);
5433 r_descr = expand_normal (t_descr);
5434 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5435 MEM_NOTRAP_P (m_descr) = 1;
5436 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5438 r_func = expand_normal (t_func);
5439 r_chain = expand_normal (t_chain);
5441 /* Generate insns to initialize the descriptor. */
5442 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5443 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5444 POINTER_SIZE / BITS_PER_UNIT), r_func);
5446 return const0_rtx;
5449 /* Expand a call to the builtin descriptor adjustment routine. */
5451 static rtx
5452 expand_builtin_adjust_descriptor (tree exp)
5454 rtx tramp;
5456 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5457 return NULL_RTX;
5459 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5461 /* Unalign the descriptor to allow runtime identification. */
5462 tramp = plus_constant (ptr_mode, tramp,
5463 targetm.calls.custom_function_descriptors);
5465 return force_operand (tramp, NULL_RTX);
5468 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5469 function. The function first checks whether the back end provides
5470 an insn to implement signbit for the respective mode. If not, it
5471 checks whether the floating point format of the value is such that
5472 the sign bit can be extracted. If that is not the case, error out.
5473 EXP is the expression that is a call to the builtin function; if
5474 convenient, the result should be placed in TARGET. */
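/* Illustrative example: for IEEE single precision the sign is bit 31, so
   when no signbit insn exists the fallback below masks bit 31 of the
   float's bit pattern; if the result mode is at least 32 bits wide this is
   a single AND with 1 << 31, otherwise the bit is first shifted down to
   position 0 and then masked with 1.  */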
5475 static rtx
5476 expand_builtin_signbit (tree exp, rtx target)
5478 const struct real_format *fmt;
5479 scalar_float_mode fmode;
5480 scalar_int_mode rmode, imode;
5481 tree arg;
5482 int word, bitpos;
5483 enum insn_code icode;
5484 rtx temp;
5485 location_t loc = EXPR_LOCATION (exp);
5487 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5488 return NULL_RTX;
5490 arg = CALL_EXPR_ARG (exp, 0);
5491 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5492 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5493 fmt = REAL_MODE_FORMAT (fmode);
5495 arg = builtin_save_expr (arg);
5497 /* Expand the argument yielding an RTX expression. */
5498 temp = expand_normal (arg);
5500 /* Check if the back end provides an insn that handles signbit for the
5501 argument's mode. */
5502 icode = optab_handler (signbit_optab, fmode);
5503 if (icode != CODE_FOR_nothing)
5505 rtx_insn *last = get_last_insn ();
5506 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5507 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5508 return target;
5509 delete_insns_since (last);
5512 /* For floating point formats without a sign bit, implement signbit
5513 as "ARG < 0.0". */
5514 bitpos = fmt->signbit_ro;
5515 if (bitpos < 0)
5517 /* But we can't do this if the format supports signed zero. */
5518 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5520 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5521 build_real (TREE_TYPE (arg), dconst0));
5522 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5525 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5527 imode = int_mode_for_mode (fmode).require ();
5528 temp = gen_lowpart (imode, temp);
5530 else
5532 imode = word_mode;
5533 /* Handle targets with different FP word orders. */
5534 if (FLOAT_WORDS_BIG_ENDIAN)
5535 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5536 else
5537 word = bitpos / BITS_PER_WORD;
5538 temp = operand_subword_force (temp, word, fmode);
5539 bitpos = bitpos % BITS_PER_WORD;
5542 /* Force the intermediate word_mode (or narrower) result into a
5543 register. This avoids attempting to create paradoxical SUBREGs
5544 of floating point modes below. */
5545 temp = force_reg (imode, temp);
5547 /* If the bitpos is within the "result mode" lowpart, the operation
5548 can be implemented with a single bitwise AND. Otherwise, we need
5549 a right shift and an AND. */
5551 if (bitpos < GET_MODE_BITSIZE (rmode))
5553 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5555 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5556 temp = gen_lowpart (rmode, temp);
5557 temp = expand_binop (rmode, and_optab, temp,
5558 immed_wide_int_const (mask, rmode),
5559 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5561 else
5563 /* Perform a logical right shift to place the signbit in the least
5564 significant bit, then truncate the result to the desired mode
5565 and mask just this bit. */
5566 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5567 temp = gen_lowpart (rmode, temp);
5568 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5569 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5572 return temp;
5575 /* Expand fork or exec calls. TARGET is the desired target of the
5576 call.  EXP is the call.  FN is the
5577 declaration (FUNCTION_DECL) of the actual function.  IGNORE is nonzero if the
5578 value is to be ignored. */
5580 static rtx
5581 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5583 tree id, decl;
5584 tree call;
5586 /* If we are not profiling, just call the function. */
5587 if (!profile_arc_flag)
5588 return NULL_RTX;
5590 /* Otherwise call the wrapper.  This should be equivalent for the rest of the
5591 compiler, so the code does not diverge, and the wrapper may run the
5592 code necessary for keeping the profiling sane. */
5594 switch (DECL_FUNCTION_CODE (fn))
5596 case BUILT_IN_FORK:
5597 id = get_identifier ("__gcov_fork");
5598 break;
5600 case BUILT_IN_EXECL:
5601 id = get_identifier ("__gcov_execl");
5602 break;
5604 case BUILT_IN_EXECV:
5605 id = get_identifier ("__gcov_execv");
5606 break;
5608 case BUILT_IN_EXECLP:
5609 id = get_identifier ("__gcov_execlp");
5610 break;
5612 case BUILT_IN_EXECLE:
5613 id = get_identifier ("__gcov_execle");
5614 break;
5616 case BUILT_IN_EXECVP:
5617 id = get_identifier ("__gcov_execvp");
5618 break;
5620 case BUILT_IN_EXECVE:
5621 id = get_identifier ("__gcov_execve");
5622 break;
5624 default:
5625 gcc_unreachable ();
5628 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5629 FUNCTION_DECL, id, TREE_TYPE (fn));
5630 DECL_EXTERNAL (decl) = 1;
5631 TREE_PUBLIC (decl) = 1;
5632 DECL_ARTIFICIAL (decl) = 1;
5633 TREE_NOTHROW (decl) = 1;
5634 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5635 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5636 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5637 return expand_call (call, target, ignore);
5642 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5643 the pointer in these functions is void*, the tree optimizers may remove
5644 casts. The mode computed in expand_builtin isn't reliable either, due
5645 to __sync_bool_compare_and_swap.
5647 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5648 group of builtins. This gives us log2 of the mode size. */
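/* Worked example: for __sync_fetch_and_add_4 the difference from the _1
   variant is 2, so the size is BITS_PER_UNIT << 2 = 32 bits and the mode
   returned is the 32-bit integer mode (SImode on typical targets).  */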
5650 static inline machine_mode
5651 get_builtin_sync_mode (int fcode_diff)
5653 /* The size is not negotiable, so ask not to get BLKmode in return
5654 if the target indicates that a smaller size would be better. */
5655 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5658 /* Expand the memory expression LOC and return the appropriate memory operand
5659 for the builtin_sync operations. */
5661 static rtx
5662 get_builtin_sync_mem (tree loc, machine_mode mode)
5664 rtx addr, mem;
5665 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5666 ? TREE_TYPE (TREE_TYPE (loc))
5667 : TREE_TYPE (loc));
5668 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5670 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5671 addr = convert_memory_address (addr_mode, addr);
5673 /* Note that we explicitly do not want any alias information for this
5674 memory, so that we kill all other live memories. Otherwise we don't
5675 satisfy the full barrier semantics of the intrinsic. */
5676 mem = gen_rtx_MEM (mode, addr);
5678 set_mem_addr_space (mem, addr_space);
5680 mem = validize_mem (mem);
5682 /* The memory needs to be at least as aligned as the mode requires. */
5683 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5684 get_pointer_alignment (loc)));
5685 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5686 MEM_VOLATILE_P (mem) = 1;
5688 return mem;
5691 /* Make sure an argument is in the right mode.
5692 EXP is the tree argument.
5693 MODE is the mode it should be in. */
5695 static rtx
5696 expand_expr_force_mode (tree exp, machine_mode mode)
5698 rtx val;
5699 machine_mode old_mode;
5701 if (TREE_CODE (exp) == SSA_NAME
5702 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5704 /* Undo argument promotion if possible, as combine might not
5705 be able to do it later due to MEM_VOLATILE_P uses in the
5706 patterns. */
5707 gimple *g = get_gimple_for_ssa_name (exp);
5708 if (g && gimple_assign_cast_p (g))
5710 tree rhs = gimple_assign_rhs1 (g);
5711 tree_code code = gimple_assign_rhs_code (g);
5712 if (CONVERT_EXPR_CODE_P (code)
5713 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5714 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5715 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5716 && (TYPE_PRECISION (TREE_TYPE (exp))
5717 > TYPE_PRECISION (TREE_TYPE (rhs))))
5718 exp = rhs;
5722 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5723 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5724 of CONST_INTs, where we know the old_mode only from the call argument. */
5726 old_mode = GET_MODE (val);
5727 if (old_mode == VOIDmode)
5728 old_mode = TYPE_MODE (TREE_TYPE (exp));
5729 val = convert_modes (mode, old_mode, val, 1);
5730 return val;
5734 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5735 EXP is the CALL_EXPR. CODE is the rtx code
5736 that corresponds to the arithmetic or logical operation from the name;
5737 an exception here is that NOT actually means NAND. TARGET is an optional
5738 place for us to store the results; AFTER is true if this is the
5739 fetch_and_xxx form. */
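/* Illustrative difference between the two families, for a shared counter V:

     old_val = __sync_fetch_and_add (&v, 1);    returns the value before
     new_val = __sync_add_and_fetch (&v, 1);    returns the value after

   For the NAND variants the operation performed (since GCC 4.4) is
   v = ~(v & x), which is why those builtins arrive here with CODE == NOT.  */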
5741 static rtx
5742 expand_builtin_sync_operation (machine_mode mode, tree exp,
5743 enum rtx_code code, bool after,
5744 rtx target)
5746 rtx val, mem;
5747 location_t loc = EXPR_LOCATION (exp);
5749 if (code == NOT && warn_sync_nand)
5751 tree fndecl = get_callee_fndecl (exp);
5752 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5754 static bool warned_f_a_n, warned_n_a_f;
5756 switch (fcode)
5758 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5759 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5760 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5761 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5762 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5763 if (warned_f_a_n)
5764 break;
5766 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5767 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5768 warned_f_a_n = true;
5769 break;
5771 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5772 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5773 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5774 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5775 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5776 if (warned_n_a_f)
5777 break;
5779 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5780 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5781 warned_n_a_f = true;
5782 break;
5784 default:
5785 gcc_unreachable ();
5789 /* Expand the operands. */
5790 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5791 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5793 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5794 after);
5797 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5798 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5799 true if this is the boolean form. TARGET is a place for us to store the
5800 results; this is NOT optional if IS_BOOL is true. */
5802 static rtx
5803 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5804 bool is_bool, rtx target)
5806 rtx old_val, new_val, mem;
5807 rtx *pbool, *poval;
5809 /* Expand the operands. */
5810 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5811 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5812 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5814 pbool = poval = NULL;
5815 if (target != const0_rtx)
5817 if (is_bool)
5818 pbool = &target;
5819 else
5820 poval = &target;
5822 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5823 false, MEMMODEL_SYNC_SEQ_CST,
5824 MEMMODEL_SYNC_SEQ_CST))
5825 return NULL_RTX;
5827 return target;
5830 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5831 general form is actually an atomic exchange, and some targets only
5832 support a reduced form with the second argument being a constant 1.
5833 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5834 the results. */
5836 static rtx
5837 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5838 rtx target)
5840 rtx val, mem;
5842 /* Expand the operands. */
5843 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5844 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5846 return expand_sync_lock_test_and_set (target, mem, val);
5849 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5851 static void
5852 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5854 rtx mem;
5856 /* Expand the operands. */
5857 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5859 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5862 /* Given an integer representing an ``enum memmodel'', verify its
5863 correctness and return the memory model enum. */
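/* Illustrative note: the argument normally comes from the __ATOMIC_* macros,
   e.g. __atomic_load_n (p, __ATOMIC_ACQUIRE), whose values map onto
   MEMMODEL_RELAXED, MEMMODEL_CONSUME, MEMMODEL_ACQUIRE, MEMMODEL_RELEASE,
   MEMMODEL_ACQ_REL and MEMMODEL_SEQ_CST; anything that cannot be validated
   at compile time is conservatively treated as MEMMODEL_SEQ_CST.  */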
5865 static enum memmodel
5866 get_memmodel (tree exp)
5868 /* If the parameter is not a constant, it's a run time value so we'll just
5869 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5870 if (TREE_CODE (exp) != INTEGER_CST)
5871 return MEMMODEL_SEQ_CST;
5873 rtx op = expand_normal (exp);
5875 unsigned HOST_WIDE_INT val = INTVAL (op);
5876 if (targetm.memmodel_check)
5877 val = targetm.memmodel_check (val);
5878 else if (val & ~MEMMODEL_MASK)
5879 return MEMMODEL_SEQ_CST;
5881 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5882 if (memmodel_base (val) >= MEMMODEL_LAST)
5883 return MEMMODEL_SEQ_CST;
5885 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5886 be conservative and promote consume to acquire. */
5887 if (val == MEMMODEL_CONSUME)
5888 val = MEMMODEL_ACQUIRE;
5890 return (enum memmodel) val;
5893 /* Expand the __atomic_exchange intrinsic:
5894 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5895 EXP is the CALL_EXPR.
5896 TARGET is an optional place for us to store the results. */
5898 static rtx
5899 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5901 rtx val, mem;
5902 enum memmodel model;
5904 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5906 if (!flag_inline_atomics)
5907 return NULL_RTX;
5909 /* Expand the operands. */
5910 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5911 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5913 return expand_atomic_exchange (target, mem, val, model);
5916 /* Expand the __atomic_compare_exchange intrinsic:
5917 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5918 TYPE desired, BOOL weak,
5919 enum memmodel success,
5920 enum memmodel failure)
5921 EXP is the CALL_EXPR.
5922 TARGET is an optional place for us to store the results. */
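/* Illustrative source-level compare-and-swap loop using the _n form of the
   builtin (UPDATE is a stand-in for any recomputation of the new value):

     long expected = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (p, &expected, update (expected),
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   On failure the builtin stores the current value of *P back into EXPECTED,
   which is the conditional store emitted at the end of this function.  */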
5924 static rtx
5925 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5926 rtx target)
5928 rtx expect, desired, mem, oldval;
5929 rtx_code_label *label;
5930 tree weak;
5931 bool is_weak;
5933 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5934 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5936 if (failure > success)
5937 success = MEMMODEL_SEQ_CST;
5939 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5941 failure = MEMMODEL_SEQ_CST;
5942 success = MEMMODEL_SEQ_CST;
5946 if (!flag_inline_atomics)
5947 return NULL_RTX;
5949 /* Expand the operands. */
5950 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5952 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5953 expect = convert_memory_address (Pmode, expect);
5954 expect = gen_rtx_MEM (mode, expect);
5955 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5957 weak = CALL_EXPR_ARG (exp, 3);
5958 is_weak = false;
5959 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5960 is_weak = true;
5962 if (target == const0_rtx)
5963 target = NULL;
5965 /* Lest the rtl backend create a race condition with an improper store
5966 to memory, always create a new pseudo for OLDVAL. */
5967 oldval = NULL;
5969 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5970 is_weak, success, failure))
5971 return NULL_RTX;
5973 /* Conditionally store back to EXPECT, lest we create a race condition
5974 with an improper store to memory. */
5975 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5976 the normal case where EXPECT is totally private, i.e. a register. At
5977 which point the store can be unconditional. */
5978 label = gen_label_rtx ();
5979 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5980 GET_MODE (target), 1, label);
5981 emit_move_insn (expect, oldval);
5982 emit_label (label);
5984 return target;
5987 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5988 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5989 call. The weak parameter must be dropped to match the expected parameter
5990 list and the expected argument changed from value to pointer to memory
5991 slot. */
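/* Illustrative example: for a 4-byte operand the internal call is rewritten
   as a call to __atomic_compare_exchange_4, with the expected value first
   spilled to a stack slot so that its address can be passed, and with the
   weak flag dropped because the library function takes no such parameter.  */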
5993 static void
5994 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5996 unsigned int z;
5997 vec<tree, va_gc> *vec;
5999 vec_alloc (vec, 5);
6000 vec->quick_push (gimple_call_arg (call, 0));
6001 tree expected = gimple_call_arg (call, 1);
6002 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6003 TREE_TYPE (expected));
6004 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6005 if (expd != x)
6006 emit_move_insn (x, expd);
6007 tree v = make_tree (TREE_TYPE (expected), x);
6008 vec->quick_push (build1 (ADDR_EXPR,
6009 build_pointer_type (TREE_TYPE (expected)), v));
6010 vec->quick_push (gimple_call_arg (call, 2));
6011 /* Skip the boolean weak parameter. */
6012 for (z = 4; z < 6; z++)
6013 vec->quick_push (gimple_call_arg (call, z));
6014 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6015 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6016 gcc_assert (bytes_log2 < 5);
6017 built_in_function fncode
6018 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6019 + bytes_log2);
6020 tree fndecl = builtin_decl_explicit (fncode);
6021 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6022 fndecl);
6023 tree exp = build_call_vec (boolean_type_node, fn, vec);
6024 tree lhs = gimple_call_lhs (call);
6025 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6026 if (lhs)
6028 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6029 if (GET_MODE (boolret) != mode)
6030 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6031 x = force_reg (mode, x);
6032 write_complex_part (target, boolret, true);
6033 write_complex_part (target, x, false);
6037 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6039 void
6040 expand_ifn_atomic_compare_exchange (gcall *call)
6042 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6043 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6044 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6046 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6047 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6049 if (failure > success)
6050 success = MEMMODEL_SEQ_CST;
6052 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6054 failure = MEMMODEL_SEQ_CST;
6055 success = MEMMODEL_SEQ_CST;
6058 if (!flag_inline_atomics)
6060 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6061 return;
6064 /* Expand the operands. */
6065 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6067 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6068 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6070 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6072 rtx boolret = NULL;
6073 rtx oldval = NULL;
6075 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6076 is_weak, success, failure))
6078 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6079 return;
6082 tree lhs = gimple_call_lhs (call);
6083 if (lhs)
6085 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6086 if (GET_MODE (boolret) != mode)
6087 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6088 write_complex_part (target, boolret, true);
6089 write_complex_part (target, oldval, false);
6093 /* Expand the __atomic_load intrinsic:
6094 TYPE __atomic_load (TYPE *object, enum memmodel)
6095 EXP is the CALL_EXPR.
6096 TARGET is an optional place for us to store the results. */
6098 static rtx
6099 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6101 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6102 if (is_mm_release (model) || is_mm_acq_rel (model))
6103 model = MEMMODEL_SEQ_CST;
6105 if (!flag_inline_atomics)
6106 return NULL_RTX;
6108 /* Expand the operand. */
6109 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6111 return expand_atomic_load (target, mem, model);
6115 /* Expand the __atomic_store intrinsic:
6116 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6117 EXP is the CALL_EXPR.
6118 TARGET is an optional place for us to store the results. */
6120 static rtx
6121 expand_builtin_atomic_store (machine_mode mode, tree exp)
6123 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6124 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6125 || is_mm_release (model)))
6126 model = MEMMODEL_SEQ_CST;
6128 if (!flag_inline_atomics)
6129 return NULL_RTX;
6131 /* Expand the operands. */
6132 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6133 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6135 return expand_atomic_store (mem, val, model, false);
6138 /* Expand the __atomic_fetch_XXX intrinsic:
6139 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6140 EXP is the CALL_EXPR.
6141 TARGET is an optional place for us to store the results.
6142 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6143 FETCH_AFTER is true if returning the result of the operation.
6144 FETCH_AFTER is false if returning the value before the operation.
6145 IGNORE is true if the result is not used.
6146 EXT_CALL is the correct builtin for an external call if this cannot be
6147 resolved to an instruction sequence. */
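/* Illustrative difference between the two forms:

     __atomic_fetch_add (p, 1, __ATOMIC_SEQ_CST)   value before the add
     __atomic_add_fetch (p, 1, __ATOMIC_SEQ_CST)   value after the add

   When only the external __atomic_fetch_OP call is available, the OP_fetch
   result is reconstructed below by reapplying the operation to the returned
   value, with NAND handled as ~(ret & val).  */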
6149 static rtx
6150 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6151 enum rtx_code code, bool fetch_after,
6152 bool ignore, enum built_in_function ext_call)
6154 rtx val, mem, ret;
6155 enum memmodel model;
6156 tree fndecl;
6157 tree addr;
6159 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6161 /* Expand the operands. */
6162 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6163 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6165 /* Only try generating instructions if inlining is turned on. */
6166 if (flag_inline_atomics)
6168 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6169 if (ret)
6170 return ret;
6173 /* Return if a different routine isn't needed for the library call. */
6174 if (ext_call == BUILT_IN_NONE)
6175 return NULL_RTX;
6177 /* Change the call to the specified function. */
6178 fndecl = get_callee_fndecl (exp);
6179 addr = CALL_EXPR_FN (exp);
6180 STRIP_NOPS (addr);
6182 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6183 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6185 /* If we will emit code after the call, the call cannot be a tail call.
6186 If it is emitted as a tail call, a barrier is emitted after it, and
6187 then all trailing code is removed. */
6188 if (!ignore)
6189 CALL_EXPR_TAILCALL (exp) = 0;
6191 /* Expand the call here so we can emit trailing code. */
6192 ret = expand_call (exp, target, ignore);
6194 /* Replace the original function just in case it matters. */
6195 TREE_OPERAND (addr, 0) = fndecl;
6197 /* Then issue the arithmetic correction to return the right result. */
6198 if (!ignore)
6200 if (code == NOT)
6202 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6203 OPTAB_LIB_WIDEN);
6204 ret = expand_simple_unop (mode, NOT, ret, target, true);
6206 else
6207 ret = expand_simple_binop (mode, code, ret, val, target, true,
6208 OPTAB_LIB_WIDEN);
6210 return ret;
6213 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6215 void
6216 expand_ifn_atomic_bit_test_and (gcall *call)
6218 tree ptr = gimple_call_arg (call, 0);
6219 tree bit = gimple_call_arg (call, 1);
6220 tree flag = gimple_call_arg (call, 2);
6221 tree lhs = gimple_call_lhs (call);
6222 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6223 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6224 enum rtx_code code;
6225 optab optab;
6226 class expand_operand ops[5];
6228 gcc_assert (flag_inline_atomics);
6230 if (gimple_call_num_args (call) == 5)
6231 model = get_memmodel (gimple_call_arg (call, 3));
6233 rtx mem = get_builtin_sync_mem (ptr, mode);
6234 rtx val = expand_expr_force_mode (bit, mode);
6236 switch (gimple_call_internal_fn (call))
6238 case IFN_ATOMIC_BIT_TEST_AND_SET:
6239 code = IOR;
6240 optab = atomic_bit_test_and_set_optab;
6241 break;
6242 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6243 code = XOR;
6244 optab = atomic_bit_test_and_complement_optab;
6245 break;
6246 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6247 code = AND;
6248 optab = atomic_bit_test_and_reset_optab;
6249 break;
6250 default:
6251 gcc_unreachable ();
6254 if (lhs == NULL_TREE)
6256 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6257 val, NULL_RTX, true, OPTAB_DIRECT);
6258 if (code == AND)
6259 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6260 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6261 return;
6264 rtx target;
6265 if (lhs)
6266 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6267 else
6268 target = gen_reg_rtx (mode);
6269 enum insn_code icode = direct_optab_handler (optab, mode);
6270 gcc_assert (icode != CODE_FOR_nothing);
6271 create_output_operand (&ops[0], target, mode);
6272 create_fixed_operand (&ops[1], mem);
6273 create_convert_operand_to (&ops[2], val, mode, true);
6274 create_integer_operand (&ops[3], model);
6275 create_integer_operand (&ops[4], integer_onep (flag));
6276 if (maybe_expand_insn (icode, 5, ops))
6277 return;
6279 rtx bitval = val;
6280 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6281 val, NULL_RTX, true, OPTAB_DIRECT);
6282 rtx maskval = val;
6283 if (code == AND)
6284 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6285 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6286 code, model, false);
6287 if (!result)
6289 bool is_atomic = gimple_call_num_args (call) == 5;
6290 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6291 tree fndecl = gimple_call_addr_fndecl (tcall);
6292 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6293 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6294 make_tree (type, val),
6295 is_atomic
6296 ? gimple_call_arg (call, 3)
6297 : integer_zero_node);
6298 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6299 mode, !lhs);
6301 if (!lhs)
6302 return;
6303 if (integer_onep (flag))
6305 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6306 NULL_RTX, true, OPTAB_DIRECT);
6307 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6308 true, OPTAB_DIRECT);
6310 else
6311 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6312 OPTAB_DIRECT);
6313 if (result != target)
6314 emit_move_insn (target, result);
6317 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6319 void
6320 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6322 tree cmp = gimple_call_arg (call, 0);
6323 tree ptr = gimple_call_arg (call, 1);
6324 tree arg = gimple_call_arg (call, 2);
6325 tree lhs = gimple_call_lhs (call);
6326 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6327 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6328 optab optab;
6329 rtx_code code;
6330 class expand_operand ops[5];
6332 gcc_assert (flag_inline_atomics);
6334 if (gimple_call_num_args (call) == 5)
6335 model = get_memmodel (gimple_call_arg (call, 3));
6337 rtx mem = get_builtin_sync_mem (ptr, mode);
6338 rtx op = expand_expr_force_mode (arg, mode);
6340 switch (gimple_call_internal_fn (call))
6342 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6343 code = PLUS;
6344 optab = atomic_add_fetch_cmp_0_optab;
6345 break;
6346 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6347 code = MINUS;
6348 optab = atomic_sub_fetch_cmp_0_optab;
6349 break;
6350 case IFN_ATOMIC_AND_FETCH_CMP_0:
6351 code = AND;
6352 optab = atomic_and_fetch_cmp_0_optab;
6353 break;
6354 case IFN_ATOMIC_OR_FETCH_CMP_0:
6355 code = IOR;
6356 optab = atomic_or_fetch_cmp_0_optab;
6357 break;
6358 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6359 code = XOR;
6360 optab = atomic_xor_fetch_cmp_0_optab;
6361 break;
6362 default:
6363 gcc_unreachable ();
6366 enum rtx_code comp = UNKNOWN;
6367 switch (tree_to_uhwi (cmp))
6369 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6370 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6371 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6372 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6373 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6374 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6375 default: gcc_unreachable ();
6378 rtx target;
6379 if (lhs == NULL_TREE)
6380 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6381 else
6382 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6383 enum insn_code icode = direct_optab_handler (optab, mode);
6384 gcc_assert (icode != CODE_FOR_nothing);
6385 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6386 create_fixed_operand (&ops[1], mem);
6387 create_convert_operand_to (&ops[2], op, mode, true);
6388 create_integer_operand (&ops[3], model);
6389 create_integer_operand (&ops[4], comp);
6390 if (maybe_expand_insn (icode, 5, ops))
6391 return;
6393 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6394 code, model, true);
6395 if (!result)
6397 bool is_atomic = gimple_call_num_args (call) == 5;
6398 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6399 tree fndecl = gimple_call_addr_fndecl (tcall);
6400 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6401 tree exp = build_call_nary (type, tcall,
6402 2 + is_atomic, ptr, arg,
6403 is_atomic
6404 ? gimple_call_arg (call, 3)
6405 : integer_zero_node);
6406 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6407 mode, !lhs);
6410 if (lhs)
6412 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6413 0, 1);
6414 if (result != target)
6415 emit_move_insn (target, result);
6419 /* Expand an atomic clear operation.
6420 void _atomic_clear (BOOL *obj, enum memmodel)
6421 EXP is the call expression. */
6423 static rtx
6424 expand_builtin_atomic_clear (tree exp)
6426 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6427 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6428 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6430 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6431 model = MEMMODEL_SEQ_CST;
6433 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6434 Failing that, a plain store is issued below. The only way this can
6435 fail is if the bool type is larger than a word size. Unlikely, but
6436 handle it anyway for completeness. Assume a single threaded model since
6437 there is no atomic support in this case, and no barriers are required. */
6438 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6439 if (!ret)
6440 emit_move_insn (mem, const0_rtx);
6441 return const0_rtx;
6444 /* Expand an atomic test_and_set operation.
6445 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6446 EXP is the call expression. */
6448 static rtx
6449 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6451 rtx mem;
6452 enum memmodel model;
6453 machine_mode mode;
6455 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6456 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6457 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6459 return expand_atomic_test_and_set (target, mem, model);
6463 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6464 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
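/* Illustrative example: on a typical target with a 32-bit compare-and-swap
   pattern, __atomic_always_lock_free (sizeof (int), 0) folds to true here,
   while passing the address of an underaligned (e.g. packed) object makes
   the type_align < mode_align check below fail and yields false.  */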
6466 static tree
6467 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6469 int size;
6470 machine_mode mode;
6471 unsigned int mode_align, type_align;
6473 if (TREE_CODE (arg0) != INTEGER_CST)
6474 return NULL_TREE;
6476 /* We need a corresponding integer mode for the access to be lock-free. */
6477 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6478 if (!int_mode_for_size (size, 0).exists (&mode))
6479 return boolean_false_node;
6481 mode_align = GET_MODE_ALIGNMENT (mode);
6483 if (TREE_CODE (arg1) == INTEGER_CST)
6485 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6487 /* Either this argument is null, or it's a fake pointer encoding
6488 the alignment of the object. */
6489 val = least_bit_hwi (val);
6490 val *= BITS_PER_UNIT;
6492 if (val == 0 || mode_align < val)
6493 type_align = mode_align;
6494 else
6495 type_align = val;
6497 else
6499 tree ttype = TREE_TYPE (arg1);
6501 /* This function is usually invoked and folded immediately by the front
6502 end before anything else has a chance to look at it. The pointer
6503 parameter at this point is usually cast to a void *, so check for that
6504 and look past the cast. */
6505 if (CONVERT_EXPR_P (arg1)
6506 && POINTER_TYPE_P (ttype)
6507 && VOID_TYPE_P (TREE_TYPE (ttype))
6508 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6509 arg1 = TREE_OPERAND (arg1, 0);
6511 ttype = TREE_TYPE (arg1);
6512 gcc_assert (POINTER_TYPE_P (ttype));
6514 /* Get the underlying type of the object. */
6515 ttype = TREE_TYPE (ttype);
6516 type_align = TYPE_ALIGN (ttype);
6519 /* If the object has smaller alignment, the lock free routines cannot
6520 be used. */
6521 if (type_align < mode_align)
6522 return boolean_false_node;
6524 /* Check if a compare_and_swap pattern exists for the mode which represents
6525 the required size. The pattern is not allowed to fail, so the existence
6526 of the pattern indicates support is present. Also require that an
6527 atomic load exists for the required size. */
6528 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6529 return boolean_true_node;
6530 else
6531 return boolean_false_node;
6534 /* Return true if the parameters to call EXP represent an object which will
6535 always generate lock free instructions. The first argument represents the
6536 size of the object, and the second parameter is a pointer to the object
6537 itself. If NULL is passed for the object, then the result is based on
6538 typical alignment for an object of the specified size. Otherwise return
6539 false. */
6541 static rtx
6542 expand_builtin_atomic_always_lock_free (tree exp)
6544 tree size;
6545 tree arg0 = CALL_EXPR_ARG (exp, 0);
6546 tree arg1 = CALL_EXPR_ARG (exp, 1);
6548 if (TREE_CODE (arg0) != INTEGER_CST)
6550 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6551 return const0_rtx;
6554 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6555 if (size == boolean_true_node)
6556 return const1_rtx;
6557 return const0_rtx;
6560 /* Return one or zero if it can be determined that the object ARG1 of size
6561 ARG0 is lock free on this architecture. */
6563 static tree
6564 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6566 if (!flag_inline_atomics)
6567 return NULL_TREE;
6569 /* If it isn't always lock free, don't generate a result. */
6570 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6571 return boolean_true_node;
6573 return NULL_TREE;
6576 /* Return true if the parameters to call EXP represent an object which will
6577 always generate lock free instructions. The first argument represents the
6578 size of the object, and the second parameter is a pointer to the object
6579 itself. If NULL is passed for the object, then the result is based on
6580 typical alignment for an object of the specified size. Otherwise return
6581 NULL.  */
6583 static rtx
6584 expand_builtin_atomic_is_lock_free (tree exp)
6586 tree size;
6587 tree arg0 = CALL_EXPR_ARG (exp, 0);
6588 tree arg1 = CALL_EXPR_ARG (exp, 1);
6590 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6592 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6593 return NULL_RTX;
6596 if (!flag_inline_atomics)
6597 return NULL_RTX;
6599 /* If the value is known at compile time, return the RTX for it. */
6600 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6601 if (size == boolean_true_node)
6602 return const1_rtx;
6604 return NULL_RTX;
6607 /* Expand the __atomic_thread_fence intrinsic:
6608 void __atomic_thread_fence (enum memmodel)
6609 EXP is the CALL_EXPR. */
6611 static void
6612 expand_builtin_atomic_thread_fence (tree exp)
6614 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6615 expand_mem_thread_fence (model);
6618 /* Expand the __atomic_signal_fence intrinsic:
6619 void __atomic_signal_fence (enum memmodel)
6620 EXP is the CALL_EXPR. */
6622 static void
6623 expand_builtin_atomic_signal_fence (tree exp)
6625 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6626 expand_mem_signal_fence (model);
6629 /* Expand the __sync_synchronize intrinsic. */
6631 static void
6632 expand_builtin_sync_synchronize (void)
6634 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6637 static rtx
6638 expand_builtin_thread_pointer (tree exp, rtx target)
6640 enum insn_code icode;
6641 if (!validate_arglist (exp, VOID_TYPE))
6642 return const0_rtx;
6643 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6644 if (icode != CODE_FOR_nothing)
6646 class expand_operand op;
6647 /* If the target is not suitable then create a new target. */
6648 if (target == NULL_RTX
6649 || !REG_P (target)
6650 || GET_MODE (target) != Pmode)
6651 target = gen_reg_rtx (Pmode);
6652 create_output_operand (&op, target, Pmode);
6653 expand_insn (icode, 1, &op);
6654 return target;
6656 error ("%<__builtin_thread_pointer%> is not supported on this target");
6657 return const0_rtx;
6660 static void
6661 expand_builtin_set_thread_pointer (tree exp)
6663 enum insn_code icode;
6664 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6665 return;
6666 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6667 if (icode != CODE_FOR_nothing)
6669 class expand_operand op;
6670 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6671 Pmode, EXPAND_NORMAL);
6672 create_input_operand (&op, val, Pmode);
6673 expand_insn (icode, 1, &op);
6674 return;
6676 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6680 /* Emit code to restore the current value of stack. */
6682 static void
6683 expand_stack_restore (tree var)
6685 rtx_insn *prev;
6686 rtx sa = expand_normal (var);
6688 sa = convert_memory_address (Pmode, sa);
6690 prev = get_last_insn ();
6691 emit_stack_restore (SAVE_BLOCK, sa);
6693 record_new_stack_level ();
6695 fixup_args_size_notes (prev, get_last_insn (), 0);
6698 /* Emit code to save the current value of stack. */
6700 static rtx
6701 expand_stack_save (void)
6703 rtx ret = NULL_RTX;
6705 emit_stack_save (SAVE_BLOCK, &ret);
6706 return ret;
6709 /* Emit code to get the openacc gang, worker or vector id or size. */
6711 static rtx
6712 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6714 const char *name;
6715 rtx fallback_retval;
6716 rtx_insn *(*gen_fn) (rtx, rtx);
6717 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6719 case BUILT_IN_GOACC_PARLEVEL_ID:
6720 name = "__builtin_goacc_parlevel_id";
6721 fallback_retval = const0_rtx;
6722 gen_fn = targetm.gen_oacc_dim_pos;
6723 break;
6724 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6725 name = "__builtin_goacc_parlevel_size";
6726 fallback_retval = const1_rtx;
6727 gen_fn = targetm.gen_oacc_dim_size;
6728 break;
6729 default:
6730 gcc_unreachable ();
6733 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6735 error ("%qs only supported in OpenACC code", name);
6736 return const0_rtx;
6739 tree arg = CALL_EXPR_ARG (exp, 0);
6740 if (TREE_CODE (arg) != INTEGER_CST)
6742 error ("non-constant argument 0 to %qs", name);
6743 return const0_rtx;
6746 int dim = TREE_INT_CST_LOW (arg);
6747 switch (dim)
6749 case GOMP_DIM_GANG:
6750 case GOMP_DIM_WORKER:
6751 case GOMP_DIM_VECTOR:
6752 break;
6753 default:
6754 error ("illegal argument 0 to %qs", name);
6755 return const0_rtx;
6758 if (ignore)
6759 return target;
6761 if (target == NULL_RTX)
6762 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6764 if (!targetm.have_oacc_dim_size ())
6766 emit_move_insn (target, fallback_retval);
6767 return target;
6770 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6771 emit_insn (gen_fn (reg, GEN_INT (dim)));
6772 if (reg != target)
6773 emit_move_insn (target, reg);
6775 return target;
6778 /* Expand a string compare operation using a sequence of char comparisons
6779 to avoid the call overhead, with the result going to TARGET if
6780 that's convenient.
6782 VAR_STR is the variable string source;
6783 CONST_STR is the constant string source;
6784 LENGTH is the number of chars to compare;
6785 CONST_STR_N indicates which source string is the constant string;
6786 MODE is the machine mode of the result.
6788 The comparison is expanded to (assume const_str_n is 2, i.e., arg2 is a constant string):
6790 target = (int) (unsigned char) var_str[0]
6791 - (int) (unsigned char) const_str[0];
6792 if (target != 0)
6793 goto ne_label;
6795 target = (int) (unsigned char) var_str[length - 2]
6796 - (int) (unsigned char) const_str[length - 2];
6797 if (target != 0)
6798 goto ne_label;
6799 target = (int) (unsigned char) var_str[length - 1]
6800 - (int) (unsigned char) const_str[length - 1];
6801 ne_label:
6804 static rtx
6805 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6806 unsigned HOST_WIDE_INT length,
6807 int const_str_n, machine_mode mode)
6809 HOST_WIDE_INT offset = 0;
6810 rtx var_rtx_array
6811 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6812 rtx var_rtx = NULL_RTX;
6813 rtx const_rtx = NULL_RTX;
6814 rtx result = target ? target : gen_reg_rtx (mode);
6815 rtx_code_label *ne_label = gen_label_rtx ();
6816 tree unit_type_node = unsigned_char_type_node;
6817 scalar_int_mode unit_mode
6818 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6820 start_sequence ();
6822 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6824 var_rtx
6825 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6826 const_rtx = c_readstr (const_str + offset, unit_mode);
6827 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6828 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6830 op0 = convert_modes (mode, unit_mode, op0, 1);
6831 op1 = convert_modes (mode, unit_mode, op1, 1);
6832 result = expand_simple_binop (mode, MINUS, op0, op1,
6833 result, 1, OPTAB_WIDEN);
6834 if (i < length - 1)
6835 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6836 mode, true, ne_label);
6837 offset += GET_MODE_SIZE (unit_mode);
6840 emit_label (ne_label);
6841 rtx_insn *insns = get_insns ();
6842 end_sequence ();
6843 emit_insn (insns);
6845 return result;
6848 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
6849 to TARGET if that's convenient.
6850 If the call is not inlined, return NULL_RTX. */
6852 static rtx
6853 inline_expand_builtin_bytecmp (tree exp, rtx target)
6855 tree fndecl = get_callee_fndecl (exp);
6856 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6857 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6859 /* Do NOT apply this inlining expansion when optimizing for size or
6860 optimization level below 2. */
6861 if (optimize < 2 || optimize_insn_for_size_p ())
6862 return NULL_RTX;
6864 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6865 || fcode == BUILT_IN_STRNCMP
6866 || fcode == BUILT_IN_MEMCMP);
6868 /* On a target where the type of the call (int) has the same or narrower
6869 precision than unsigned char, give up on the inlining expansion. */
6870 if (TYPE_PRECISION (unsigned_char_type_node)
6871 >= TYPE_PRECISION (TREE_TYPE (exp)))
6872 return NULL_RTX;
6874 tree arg1 = CALL_EXPR_ARG (exp, 0);
6875 tree arg2 = CALL_EXPR_ARG (exp, 1);
6876 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6878 unsigned HOST_WIDE_INT len1 = 0;
6879 unsigned HOST_WIDE_INT len2 = 0;
6880 unsigned HOST_WIDE_INT len3 = 0;
6882 /* Get the object representation of the initializers of ARG1 and ARG2
6883 as strings, provided they refer to constant objects, with their byte
6884 sizes in LEN1 and LEN2, respectively. */
6885 const char *bytes1 = getbyterep (arg1, &len1);
6886 const char *bytes2 = getbyterep (arg2, &len2);
6888 /* Fail if neither argument refers to an initialized constant. */
6889 if (!bytes1 && !bytes2)
6890 return NULL_RTX;
6892 if (is_ncmp)
6894 /* Fail if the memcmp/strncmp bound is not a constant. */
6895 if (!tree_fits_uhwi_p (len3_tree))
6896 return NULL_RTX;
6898 len3 = tree_to_uhwi (len3_tree);
6900 if (fcode == BUILT_IN_MEMCMP)
6902 /* Fail if the memcmp bound is greater than the size of either
6903 of the two constant objects. */
6904 if ((bytes1 && len1 < len3)
6905 || (bytes2 && len2 < len3))
6906 return NULL_RTX;
6910 if (fcode != BUILT_IN_MEMCMP)
6912 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6913 and LEN2 to the length of the nul-terminated string stored
6914 in each. */
6915 if (bytes1 != NULL)
6916 len1 = strnlen (bytes1, len1) + 1;
6917 if (bytes2 != NULL)
6918 len2 = strnlen (bytes2, len2) + 1;
6921 /* See inline_string_cmp. */
6922 int const_str_n;
6923 if (!len1)
6924 const_str_n = 2;
6925 else if (!len2)
6926 const_str_n = 1;
6927 else if (len2 > len1)
6928 const_str_n = 1;
6929 else
6930 const_str_n = 2;
6932 /* For strncmp only, compute the new bound as the smallest of
6933 the lengths of the two strings (plus 1) and the bound provided
6934 to the function. */
6935 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6936 if (is_ncmp && len3 < bound)
6937 bound = len3;
6939 /* If the bound of the comparison is larger than the threshold,
6940 do nothing. */
6941 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6942 return NULL_RTX;
6944 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6946 /* Now, start inline expansion of the call. */
6947 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6948 (const_str_n == 1) ? bytes1 : bytes2, bound,
6949 const_str_n, mode);
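/* Worked example: for strncmp (s, "abc", 16) the constant string has length
   4 including the terminating nul, so the bound used above is the smaller of
   4 and 16, i.e. 4; the inline expansion is then emitted only if 4 does not
   exceed param_builtin_string_cmp_inline_length.  */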
6952 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6953 represents the size of the first argument to that call, or VOIDmode
6954 if the argument is a pointer. IGNORE will be true if the result
6955 isn't used. */
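/* Illustrative use at the source level, following the pattern in the GCC
   documentation (ARRAY, N and I are stand-ins for user data):

     if (i < n)
       x = array[__builtin_speculation_safe_value (i)];

   On targets without a speculation barrier the hook may simply pass the
   value through.  */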
6956 static rtx
6957 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6958 bool ignore)
6960 rtx val, failsafe;
6961 unsigned nargs = call_expr_nargs (exp);
6963 tree arg0 = CALL_EXPR_ARG (exp, 0);
6965 if (mode == VOIDmode)
6967 mode = TYPE_MODE (TREE_TYPE (arg0));
6968 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6971 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6973 /* An optional second argument can be used as a failsafe value on
6974 some machines. If it isn't present, then the failsafe value is
6975 assumed to be 0. */
6976 if (nargs > 1)
6978 tree arg1 = CALL_EXPR_ARG (exp, 1);
6979 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6981 else
6982 failsafe = const0_rtx;
6984 /* If the result isn't used, the behavior is undefined. It would be
6985 nice to emit a warning here, but path splitting means this might
6986 happen with legitimate code. So simply drop the builtin
6987 expansion in that case; we've handled any side-effects above. */
6988 if (ignore)
6989 return const0_rtx;
6991 /* If we don't have a suitable target, create one to hold the result. */
6992 if (target == NULL || GET_MODE (target) != mode)
6993 target = gen_reg_rtx (mode);
6995 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6996 val = convert_modes (mode, VOIDmode, val, false);
6998 return targetm.speculation_safe_value (mode, target, val, failsafe);
7001 /* Expand an expression EXP that calls a built-in function,
7002 with result going to TARGET if that's convenient
7003 (and in mode MODE if that's convenient).
7004 SUBTARGET may be used as the target for computing one of EXP's operands.
7005 IGNORE is nonzero if the value is to be ignored. */
7008 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7009 int ignore)
7011 tree fndecl = get_callee_fndecl (exp);
7012 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7013 int flags;
7015 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7016 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7018 /* When ASan is enabled, we don't want to expand some memory/string
7019 builtins and rely on libsanitizer's hooks. This allows us to avoid
7020 redundant checks and be sure that possible overflows will be detected
7021 by ASan. */
7023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7024 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7025 return expand_call (exp, target, ignore);
7027 /* When not optimizing, generate calls to library functions for a certain
7028 set of builtins. */
7029 if (!optimize
7030 && !called_as_built_in (fndecl)
7031 && fcode != BUILT_IN_FORK
7032 && fcode != BUILT_IN_EXECL
7033 && fcode != BUILT_IN_EXECV
7034 && fcode != BUILT_IN_EXECLP
7035 && fcode != BUILT_IN_EXECLE
7036 && fcode != BUILT_IN_EXECVP
7037 && fcode != BUILT_IN_EXECVE
7038 && fcode != BUILT_IN_CLEAR_CACHE
7039 && !ALLOCA_FUNCTION_CODE_P (fcode)
7040 && fcode != BUILT_IN_FREE)
7041 return expand_call (exp, target, ignore);
7043 /* The built-in function expanders test for target == const0_rtx
7044 to determine whether the function's result will be ignored. */
7045 if (ignore)
7046 target = const0_rtx;
7048 /* If the result of a pure or const built-in function is ignored, and
7049 none of its arguments are volatile, we can avoid expanding the
7050 built-in call and just evaluate the arguments for side-effects. */
7051 if (target == const0_rtx
7052 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7053 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7055 bool volatilep = false;
7056 tree arg;
7057 call_expr_arg_iterator iter;
7059 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7060 if (TREE_THIS_VOLATILE (arg))
7062 volatilep = true;
7063 break;
7066 if (! volatilep)
7068 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7069 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7070 return const0_rtx;
7074 switch (fcode)
7076 CASE_FLT_FN (BUILT_IN_FABS):
7077 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7078 case BUILT_IN_FABSD32:
7079 case BUILT_IN_FABSD64:
7080 case BUILT_IN_FABSD128:
7081 target = expand_builtin_fabs (exp, target, subtarget);
7082 if (target)
7083 return target;
7084 break;
7086 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7087 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7088 target = expand_builtin_copysign (exp, target, subtarget);
7089 if (target)
7090 return target;
7091 break;
7093 /* Just do a normal library call if we were unable to fold
7094 the values. */
7095 CASE_FLT_FN (BUILT_IN_CABS):
7096 break;
7098 CASE_FLT_FN (BUILT_IN_FMA):
7099 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7100 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7101 if (target)
7102 return target;
7103 break;
7105 CASE_FLT_FN (BUILT_IN_ILOGB):
7106 if (! flag_unsafe_math_optimizations)
7107 break;
7108 gcc_fallthrough ();
7109 CASE_FLT_FN (BUILT_IN_ISINF):
7110 CASE_FLT_FN (BUILT_IN_FINITE):
7111 case BUILT_IN_ISFINITE:
7112 case BUILT_IN_ISNORMAL:
7113 target = expand_builtin_interclass_mathfn (exp, target);
7114 if (target)
7115 return target;
7116 break;
7118 CASE_FLT_FN (BUILT_IN_ICEIL):
7119 CASE_FLT_FN (BUILT_IN_LCEIL):
7120 CASE_FLT_FN (BUILT_IN_LLCEIL):
7121 CASE_FLT_FN (BUILT_IN_LFLOOR):
7122 CASE_FLT_FN (BUILT_IN_IFLOOR):
7123 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7124 target = expand_builtin_int_roundingfn (exp, target);
7125 if (target)
7126 return target;
7127 break;
7129 CASE_FLT_FN (BUILT_IN_IRINT):
7130 CASE_FLT_FN (BUILT_IN_LRINT):
7131 CASE_FLT_FN (BUILT_IN_LLRINT):
7132 CASE_FLT_FN (BUILT_IN_IROUND):
7133 CASE_FLT_FN (BUILT_IN_LROUND):
7134 CASE_FLT_FN (BUILT_IN_LLROUND):
7135 target = expand_builtin_int_roundingfn_2 (exp, target);
7136 if (target)
7137 return target;
7138 break;
7140 CASE_FLT_FN (BUILT_IN_POWI):
7141 target = expand_builtin_powi (exp, target);
7142 if (target)
7143 return target;
7144 break;
7146 CASE_FLT_FN (BUILT_IN_CEXPI):
7147 target = expand_builtin_cexpi (exp, target);
7148 gcc_assert (target);
7149 return target;
7151 CASE_FLT_FN (BUILT_IN_SIN):
7152 CASE_FLT_FN (BUILT_IN_COS):
7153 if (! flag_unsafe_math_optimizations)
7154 break;
7155 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7156 if (target)
7157 return target;
7158 break;
7160 CASE_FLT_FN (BUILT_IN_SINCOS):
7161 if (! flag_unsafe_math_optimizations)
7162 break;
7163 target = expand_builtin_sincos (exp);
7164 if (target)
7165 return target;
7166 break;
7168 case BUILT_IN_FEGETROUND:
7169 target = expand_builtin_fegetround (exp, target, target_mode);
7170 if (target)
7171 return target;
7172 break;
7174 case BUILT_IN_FECLEAREXCEPT:
7175 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7176 feclearexcept_optab);
7177 if (target)
7178 return target;
7179 break;
7181 case BUILT_IN_FERAISEEXCEPT:
7182 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7183 feraiseexcept_optab);
7184 if (target)
7185 return target;
7186 break;
7188 case BUILT_IN_APPLY_ARGS:
7189 return expand_builtin_apply_args ();
7191 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7192 FUNCTION with a copy of the parameters described by
7193 ARGUMENTS and ARGSIZE. It returns a block of memory
7194 allocated on the stack into which is stored all the registers
7195 that might possibly be used for returning the result of a
7196 function. ARGUMENTS is the value returned by
7197 __builtin_apply_args. ARGSIZE is the number of bytes of
7198 arguments that must be copied. ??? How should this value be
7199 computed? We'll also need a safe worst case value for varargs
7200 functions. */
7201 case BUILT_IN_APPLY:
7202 if (!validate_arglist (exp, POINTER_TYPE,
7203 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7204 && !validate_arglist (exp, REFERENCE_TYPE,
7205 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7206 return const0_rtx;
7207 else
7209 rtx ops[3];
7211 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7212 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7213 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7215 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7218 /* __builtin_return (RESULT) causes the function to return the
7219 value described by RESULT. RESULT is address of the block of
7220 memory returned by __builtin_apply. */
7221 case BUILT_IN_RETURN:
7222 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7223 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7224 return const0_rtx;
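/* Taken together, the builtins above support transparent argument
   forwarding, roughly (a sketch only; CALLEE and ARGSIZE, a worst-case
   byte count, are supplied by the user):

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) callee, args, ARGSIZE);
     __builtin_return (res);  */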
7226 case BUILT_IN_SAVEREGS:
7227 return expand_builtin_saveregs ();
7229 case BUILT_IN_VA_ARG_PACK:
7230 /* All valid uses of __builtin_va_arg_pack () are removed during
7231 inlining. */
7232 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7233 return const0_rtx;
7235 case BUILT_IN_VA_ARG_PACK_LEN:
7236 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7237 inlining. */
7238 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7239 return const0_rtx;
7241 /* Return the address of the first anonymous stack arg. */
7242 case BUILT_IN_NEXT_ARG:
7243 if (fold_builtin_next_arg (exp, false))
7244 return const0_rtx;
7245 return expand_builtin_next_arg ();
7247 case BUILT_IN_CLEAR_CACHE:
7248 expand_builtin___clear_cache (exp);
7249 return const0_rtx;
7251 case BUILT_IN_CLASSIFY_TYPE:
7252 return expand_builtin_classify_type (exp);
7254 case BUILT_IN_CONSTANT_P:
7255 return const0_rtx;
7257 case BUILT_IN_FRAME_ADDRESS:
7258 case BUILT_IN_RETURN_ADDRESS:
7259 return expand_builtin_frame_address (fndecl, exp);
7261 /* Returns the address of the area where the structure is returned.
7262 0 otherwise. */
7263 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7264 if (call_expr_nargs (exp) != 0
7265 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7266 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7267 return const0_rtx;
7268 else
7269 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7271 CASE_BUILT_IN_ALLOCA:
7272 target = expand_builtin_alloca (exp);
7273 if (target)
7274 return target;
7275 break;
7277 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7278 return expand_asan_emit_allocas_unpoison (exp);
7280 case BUILT_IN_STACK_SAVE:
7281 return expand_stack_save ();
7283 case BUILT_IN_STACK_RESTORE:
7284 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7285 return const0_rtx;
7287 case BUILT_IN_BSWAP16:
7288 case BUILT_IN_BSWAP32:
7289 case BUILT_IN_BSWAP64:
7290 case BUILT_IN_BSWAP128:
7291 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7292 if (target)
7293 return target;
7294 break;
7296 CASE_INT_FN (BUILT_IN_FFS):
7297 target = expand_builtin_unop (target_mode, exp, target,
7298 subtarget, ffs_optab);
7299 if (target)
7300 return target;
7301 break;
7303 CASE_INT_FN (BUILT_IN_CLZ):
7304 target = expand_builtin_unop (target_mode, exp, target,
7305 subtarget, clz_optab);
7306 if (target)
7307 return target;
7308 break;
7310 CASE_INT_FN (BUILT_IN_CTZ):
7311 target = expand_builtin_unop (target_mode, exp, target,
7312 subtarget, ctz_optab);
7313 if (target)
7314 return target;
7315 break;
7317 CASE_INT_FN (BUILT_IN_CLRSB):
7318 target = expand_builtin_unop (target_mode, exp, target,
7319 subtarget, clrsb_optab);
7320 if (target)
7321 return target;
7322 break;
7324 CASE_INT_FN (BUILT_IN_POPCOUNT):
7325 target = expand_builtin_unop (target_mode, exp, target,
7326 subtarget, popcount_optab);
7327 if (target)
7328 return target;
7329 break;
7331 CASE_INT_FN (BUILT_IN_PARITY):
7332 target = expand_builtin_unop (target_mode, exp, target,
7333 subtarget, parity_optab);
7334 if (target)
7335 return target;
7336 break;
7338 case BUILT_IN_STRLEN:
7339 target = expand_builtin_strlen (exp, target, target_mode);
7340 if (target)
7341 return target;
7342 break;
7344 case BUILT_IN_STRNLEN:
7345 target = expand_builtin_strnlen (exp, target, target_mode);
7346 if (target)
7347 return target;
7348 break;
7350 case BUILT_IN_STRCPY:
7351 target = expand_builtin_strcpy (exp, target);
7352 if (target)
7353 return target;
7354 break;
7356 case BUILT_IN_STRNCPY:
7357 target = expand_builtin_strncpy (exp, target);
7358 if (target)
7359 return target;
7360 break;
7362 case BUILT_IN_STPCPY:
7363 target = expand_builtin_stpcpy (exp, target, mode);
7364 if (target)
7365 return target;
7366 break;
7368 case BUILT_IN_MEMCPY:
7369 target = expand_builtin_memcpy (exp, target);
7370 if (target)
7371 return target;
7372 break;
7374 case BUILT_IN_MEMMOVE:
7375 target = expand_builtin_memmove (exp, target);
7376 if (target)
7377 return target;
7378 break;
7380 case BUILT_IN_MEMPCPY:
7381 target = expand_builtin_mempcpy (exp, target);
7382 if (target)
7383 return target;
7384 break;
7386 case BUILT_IN_MEMSET:
7387 target = expand_builtin_memset (exp, target, mode);
7388 if (target)
7389 return target;
7390 break;
7392 case BUILT_IN_BZERO:
7393 target = expand_builtin_bzero (exp);
7394 if (target)
7395 return target;
7396 break;
7398 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7399 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7400 when changing it to a strcmp call. */
7401 case BUILT_IN_STRCMP_EQ:
7402 target = expand_builtin_memcmp (exp, target, true);
7403 if (target)
7404 return target;
7406 /* Change this call back to a BUILT_IN_STRCMP. */
7407 TREE_OPERAND (exp, 1)
7408 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7410 /* Delete the last parameter. */
7411 unsigned int i;
7412 vec<tree, va_gc> *arg_vec;
7413 vec_alloc (arg_vec, 2);
7414 for (i = 0; i < 2; i++)
7415 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7416 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7417 /* FALLTHROUGH */
7419 case BUILT_IN_STRCMP:
7420 target = expand_builtin_strcmp (exp, target);
7421 if (target)
7422 return target;
7423 break;
7425 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7426 back to a BUILT_IN_STRNCMP. */
7427 case BUILT_IN_STRNCMP_EQ:
7428 target = expand_builtin_memcmp (exp, target, true);
7429 if (target)
7430 return target;
7432 /* Change it back to a BUILT_IN_STRNCMP. */
7433 TREE_OPERAND (exp, 1)
7434 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7435 /* FALLTHROUGH */
7437 case BUILT_IN_STRNCMP:
7438 target = expand_builtin_strncmp (exp, target, mode);
7439 if (target)
7440 return target;
7441 break;
7443 case BUILT_IN_BCMP:
7444 case BUILT_IN_MEMCMP:
7445 case BUILT_IN_MEMCMP_EQ:
7446 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7447 if (target)
7448 return target;
7449 if (fcode == BUILT_IN_MEMCMP_EQ)
7451 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7452 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7454 break;
7456 case BUILT_IN_SETJMP:
7457 /* This should have been lowered to the builtins below. */
7458 gcc_unreachable ();
7460 case BUILT_IN_SETJMP_SETUP:
7461 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7462 and the receiver label. */
7463 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7465 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7466 VOIDmode, EXPAND_NORMAL);
7467 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7468 rtx_insn *label_r = label_rtx (label);
7470 /* This is copied from the handling of non-local gotos. */
7471 expand_builtin_setjmp_setup (buf_addr, label_r);
7472 nonlocal_goto_handler_labels
7473 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7474 nonlocal_goto_handler_labels);
7475 /* ??? Do not let expand_label treat us as such since we would
7476 not want to be both on the list of non-local labels and on
7477 the list of forced labels. */
7478 FORCED_LABEL (label) = 0;
7479 return const0_rtx;
7481 break;
7483 case BUILT_IN_SETJMP_RECEIVER:
7484 /* __builtin_setjmp_receiver is passed the receiver label. */
7485 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7487 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7488 rtx_insn *label_r = label_rtx (label);
7490 expand_builtin_setjmp_receiver (label_r);
7491 return const0_rtx;
7493 break;
7495 /* __builtin_longjmp is passed a pointer to an array of five words.
7496 It's similar to the C library longjmp function but works with
7497 __builtin_setjmp above. */
7498 case BUILT_IN_LONGJMP:
7499 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7501 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7502 VOIDmode, EXPAND_NORMAL);
7503 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7505 if (value != const1_rtx)
7507 error ("%<__builtin_longjmp%> second argument must be 1");
7508 return const0_rtx;
7511 expand_builtin_longjmp (buf_addr, value);
7512 return const0_rtx;
7514 break;
7516 case BUILT_IN_NONLOCAL_GOTO:
7517 target = expand_builtin_nonlocal_goto (exp);
7518 if (target)
7519 return target;
7520 break;
7522 /* This updates the setjmp buffer that is its argument with the value
7523 of the current stack pointer. */
7524 case BUILT_IN_UPDATE_SETJMP_BUF:
7525 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7527 rtx buf_addr
7528 = expand_normal (CALL_EXPR_ARG (exp, 0));
7530 expand_builtin_update_setjmp_buf (buf_addr);
7531 return const0_rtx;
7533 break;
7535 case BUILT_IN_TRAP:
7536 expand_builtin_trap ();
7537 return const0_rtx;
7539 case BUILT_IN_UNREACHABLE:
7540 expand_builtin_unreachable ();
7541 return const0_rtx;
7543 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7544 case BUILT_IN_SIGNBITD32:
7545 case BUILT_IN_SIGNBITD64:
7546 case BUILT_IN_SIGNBITD128:
7547 target = expand_builtin_signbit (exp, target);
7548 if (target)
7549 return target;
7550 break;
7552 /* Various hooks for the DWARF 2 __throw routine. */
7553 case BUILT_IN_UNWIND_INIT:
7554 expand_builtin_unwind_init ();
7555 return const0_rtx;
7556 case BUILT_IN_DWARF_CFA:
7557 return virtual_cfa_rtx;
7558 #ifdef DWARF2_UNWIND_INFO
7559 case BUILT_IN_DWARF_SP_COLUMN:
7560 return expand_builtin_dwarf_sp_column ();
7561 case BUILT_IN_INIT_DWARF_REG_SIZES:
7562 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7563 return const0_rtx;
7564 #endif
7565 case BUILT_IN_FROB_RETURN_ADDR:
7566 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7567 case BUILT_IN_EXTRACT_RETURN_ADDR:
7568 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7569 case BUILT_IN_EH_RETURN:
7570 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7571 CALL_EXPR_ARG (exp, 1));
7572 return const0_rtx;
7573 case BUILT_IN_EH_RETURN_DATA_REGNO:
7574 return expand_builtin_eh_return_data_regno (exp);
7575 case BUILT_IN_EXTEND_POINTER:
7576 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7577 case BUILT_IN_EH_POINTER:
7578 return expand_builtin_eh_pointer (exp);
7579 case BUILT_IN_EH_FILTER:
7580 return expand_builtin_eh_filter (exp);
7581 case BUILT_IN_EH_COPY_VALUES:
7582 return expand_builtin_eh_copy_values (exp);
7584 case BUILT_IN_VA_START:
7585 return expand_builtin_va_start (exp);
7586 case BUILT_IN_VA_END:
7587 return expand_builtin_va_end (exp);
7588 case BUILT_IN_VA_COPY:
7589 return expand_builtin_va_copy (exp);
7590 case BUILT_IN_EXPECT:
7591 return expand_builtin_expect (exp, target);
7592 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7593 return expand_builtin_expect_with_probability (exp, target);
7594 case BUILT_IN_ASSUME_ALIGNED:
7595 return expand_builtin_assume_aligned (exp, target);
7596 case BUILT_IN_PREFETCH:
7597 expand_builtin_prefetch (exp);
7598 return const0_rtx;
7600 case BUILT_IN_INIT_TRAMPOLINE:
7601 return expand_builtin_init_trampoline (exp, true);
7602 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7603 return expand_builtin_init_trampoline (exp, false);
7604 case BUILT_IN_ADJUST_TRAMPOLINE:
7605 return expand_builtin_adjust_trampoline (exp);
7607 case BUILT_IN_INIT_DESCRIPTOR:
7608 return expand_builtin_init_descriptor (exp);
7609 case BUILT_IN_ADJUST_DESCRIPTOR:
7610 return expand_builtin_adjust_descriptor (exp);
7612 case BUILT_IN_FORK:
7613 case BUILT_IN_EXECL:
7614 case BUILT_IN_EXECV:
7615 case BUILT_IN_EXECLP:
7616 case BUILT_IN_EXECLE:
7617 case BUILT_IN_EXECVP:
7618 case BUILT_IN_EXECVE:
7619 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7620 if (target)
7621 return target;
7622 break;
7624 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7625 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7626 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7627 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7628 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7629 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7630 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7631 if (target)
7632 return target;
7633 break;
7635 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7636 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7637 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7638 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7639 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7640 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7641 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7642 if (target)
7643 return target;
7644 break;
7646 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7647 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7648 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7649 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7650 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7651 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7652 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7653 if (target)
7654 return target;
7655 break;
7657 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7658 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7659 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7660 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7661 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7662 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7663 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7664 if (target)
7665 return target;
7666 break;
7668 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7669 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7670 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7671 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7672 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7673 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7674 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7675 if (target)
7676 return target;
7677 break;
7679 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7680 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7681 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7682 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7683 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7684 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7685 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7686 if (target)
7687 return target;
7688 break;
7690 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7691 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7692 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7693 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7694 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7695 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7696 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7697 if (target)
7698 return target;
7699 break;
7701 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7702 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7703 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7704 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7705 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7706 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7707 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7708 if (target)
7709 return target;
7710 break;
7712 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7713 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7714 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7715 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7716 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7717 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7718 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7724 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7725 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7726 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7727 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7728 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7729 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7730 if (target)
7731 return target;
7732 break;
7734 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7735 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7736 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7737 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7738 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7739 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7740 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7741 if (target)
7742 return target;
7743 break;
7745 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7746 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7747 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7748 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7749 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7750 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7751 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7752 if (target)
7753 return target;
7754 break;
7756 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7757 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7758 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7759 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7760 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7761 if (mode == VOIDmode)
7762 mode = TYPE_MODE (boolean_type_node);
7763 if (!target || !register_operand (target, mode))
7764 target = gen_reg_rtx (mode);
7766 mode = get_builtin_sync_mode
7767 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7768 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7769 if (target)
7770 return target;
7771 break;
7773 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7774 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7775 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7776 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7777 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7778 mode = get_builtin_sync_mode
7779 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7780 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7781 if (target)
7782 return target;
7783 break;
7785 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7786 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7787 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7788 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7789 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7790 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7791 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7792 if (target)
7793 return target;
7794 break;
7796 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7797 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7798 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7799 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7800 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7801 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7802 expand_builtin_sync_lock_release (mode, exp);
7803 return const0_rtx;
7805 case BUILT_IN_SYNC_SYNCHRONIZE:
7806 expand_builtin_sync_synchronize ();
7807 return const0_rtx;
7809 case BUILT_IN_ATOMIC_EXCHANGE_1:
7810 case BUILT_IN_ATOMIC_EXCHANGE_2:
7811 case BUILT_IN_ATOMIC_EXCHANGE_4:
7812 case BUILT_IN_ATOMIC_EXCHANGE_8:
7813 case BUILT_IN_ATOMIC_EXCHANGE_16:
7814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7815 target = expand_builtin_atomic_exchange (mode, exp, target);
7816 if (target)
7817 return target;
7818 break;
7820 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7821 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7822 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7823 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7824 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7826 unsigned int nargs, z;
7827 vec<tree, va_gc> *vec;
7829 mode =
7830 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7831 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7832 if (target)
7833 return target;
7835 /* If this is turned into an external library call, the weak parameter
7836 must be dropped to match the expected parameter list. */
7837 nargs = call_expr_nargs (exp);
7838 vec_alloc (vec, nargs - 1);
7839 for (z = 0; z < 3; z++)
7840 vec->quick_push (CALL_EXPR_ARG (exp, z));
7841 /* Skip the boolean weak parameter. */
7842 for (z = 4; z < 6; z++)
7843 vec->quick_push (CALL_EXPR_ARG (exp, z));
7844 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
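	/* The rebuilt EXP now carries the five arguments (ptr, expected,
	   desired, success_order, failure_order) expected by the out-of-line
	   __atomic_compare_exchange_<N> library functions.  */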
7845 break;
7848 case BUILT_IN_ATOMIC_LOAD_1:
7849 case BUILT_IN_ATOMIC_LOAD_2:
7850 case BUILT_IN_ATOMIC_LOAD_4:
7851 case BUILT_IN_ATOMIC_LOAD_8:
7852 case BUILT_IN_ATOMIC_LOAD_16:
7853 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7854 target = expand_builtin_atomic_load (mode, exp, target);
7855 if (target)
7856 return target;
7857 break;
7859 case BUILT_IN_ATOMIC_STORE_1:
7860 case BUILT_IN_ATOMIC_STORE_2:
7861 case BUILT_IN_ATOMIC_STORE_4:
7862 case BUILT_IN_ATOMIC_STORE_8:
7863 case BUILT_IN_ATOMIC_STORE_16:
7864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7865 target = expand_builtin_atomic_store (mode, exp);
7866 if (target)
7867 return const0_rtx;
7868 break;
7870 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7871 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7872 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7873 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7874 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7876 enum built_in_function lib;
7877 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7878 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7879 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7880 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7881 ignore, lib);
7882 if (target)
7883 return target;
7884 break;
7886 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7887 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7888 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7889 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7890 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7892 enum built_in_function lib;
7893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7894 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7895 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7896 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7897 ignore, lib);
7898 if (target)
7899 return target;
7900 break;
7902 case BUILT_IN_ATOMIC_AND_FETCH_1:
7903 case BUILT_IN_ATOMIC_AND_FETCH_2:
7904 case BUILT_IN_ATOMIC_AND_FETCH_4:
7905 case BUILT_IN_ATOMIC_AND_FETCH_8:
7906 case BUILT_IN_ATOMIC_AND_FETCH_16:
7908 enum built_in_function lib;
7909 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7910 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7911 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7912 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7913 ignore, lib);
7914 if (target)
7915 return target;
7916 break;
7918 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7919 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7920 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7921 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7922 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7924 enum built_in_function lib;
7925 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7926 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7927 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7928 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7929 ignore, lib);
7930 if (target)
7931 return target;
7932 break;
7934 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7935 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7936 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7937 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7938 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7940 enum built_in_function lib;
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7942 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7943 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7944 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7945 ignore, lib);
7946 if (target)
7947 return target;
7948 break;
7950 case BUILT_IN_ATOMIC_OR_FETCH_1:
7951 case BUILT_IN_ATOMIC_OR_FETCH_2:
7952 case BUILT_IN_ATOMIC_OR_FETCH_4:
7953 case BUILT_IN_ATOMIC_OR_FETCH_8:
7954 case BUILT_IN_ATOMIC_OR_FETCH_16:
7956 enum built_in_function lib;
7957 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7958 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7959 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7960 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7961 ignore, lib);
7962 if (target)
7963 return target;
7964 break;
7966 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7967 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7968 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7969 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7970 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7971 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7972 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7973 ignore, BUILT_IN_NONE);
7974 if (target)
7975 return target;
7976 break;
7978 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7979 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7980 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7981 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7982 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7983 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7984 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7985 ignore, BUILT_IN_NONE);
7986 if (target)
7987 return target;
7988 break;
7990 case BUILT_IN_ATOMIC_FETCH_AND_1:
7991 case BUILT_IN_ATOMIC_FETCH_AND_2:
7992 case BUILT_IN_ATOMIC_FETCH_AND_4:
7993 case BUILT_IN_ATOMIC_FETCH_AND_8:
7994 case BUILT_IN_ATOMIC_FETCH_AND_16:
7995 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7996 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7997 ignore, BUILT_IN_NONE);
7998 if (target)
7999 return target;
8000 break;
8002 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8003 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8004 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8005 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8006 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8007 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8008 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8009 ignore, BUILT_IN_NONE);
8010 if (target)
8011 return target;
8012 break;
8014 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8015 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8016 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8017 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8018 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8019 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8020 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8021 ignore, BUILT_IN_NONE);
8022 if (target)
8023 return target;
8024 break;
8026 case BUILT_IN_ATOMIC_FETCH_OR_1:
8027 case BUILT_IN_ATOMIC_FETCH_OR_2:
8028 case BUILT_IN_ATOMIC_FETCH_OR_4:
8029 case BUILT_IN_ATOMIC_FETCH_OR_8:
8030 case BUILT_IN_ATOMIC_FETCH_OR_16:
8031 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8032 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8033 ignore, BUILT_IN_NONE);
8034 if (target)
8035 return target;
8036 break;
8038 case BUILT_IN_ATOMIC_TEST_AND_SET:
8039 return expand_builtin_atomic_test_and_set (exp, target);
8041 case BUILT_IN_ATOMIC_CLEAR:
8042 return expand_builtin_atomic_clear (exp);
8044 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8045 return expand_builtin_atomic_always_lock_free (exp);
8047 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8048 target = expand_builtin_atomic_is_lock_free (exp);
8049 if (target)
8050 return target;
8051 break;
8053 case BUILT_IN_ATOMIC_THREAD_FENCE:
8054 expand_builtin_atomic_thread_fence (exp);
8055 return const0_rtx;
8057 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8058 expand_builtin_atomic_signal_fence (exp);
8059 return const0_rtx;
8061 case BUILT_IN_OBJECT_SIZE:
8062 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8063 return expand_builtin_object_size (exp);
8065 case BUILT_IN_MEMCPY_CHK:
8066 case BUILT_IN_MEMPCPY_CHK:
8067 case BUILT_IN_MEMMOVE_CHK:
8068 case BUILT_IN_MEMSET_CHK:
8069 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8070 if (target)
8071 return target;
8072 break;
8074 case BUILT_IN_STRCPY_CHK:
8075 case BUILT_IN_STPCPY_CHK:
8076 case BUILT_IN_STRNCPY_CHK:
8077 case BUILT_IN_STPNCPY_CHK:
8078 case BUILT_IN_STRCAT_CHK:
8079 case BUILT_IN_STRNCAT_CHK:
8080 case BUILT_IN_SNPRINTF_CHK:
8081 case BUILT_IN_VSNPRINTF_CHK:
8082 maybe_emit_chk_warning (exp, fcode);
8083 break;
8085 case BUILT_IN_SPRINTF_CHK:
8086 case BUILT_IN_VSPRINTF_CHK:
8087 maybe_emit_sprintf_chk_warning (exp, fcode);
8088 break;
8090 case BUILT_IN_THREAD_POINTER:
8091 return expand_builtin_thread_pointer (exp, target);
8093 case BUILT_IN_SET_THREAD_POINTER:
8094 expand_builtin_set_thread_pointer (exp);
8095 return const0_rtx;
8097 case BUILT_IN_ACC_ON_DEVICE:
8098 /* Do a library call if we failed to expand the builtin when
8099 folding. */
8100 break;
8102 case BUILT_IN_GOACC_PARLEVEL_ID:
8103 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8104 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8106 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8107 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8109 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8110 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8111 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8112 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8113 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8114 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8115 return expand_speculation_safe_value (mode, exp, target, ignore);
8117 default: /* just do a library call for an unknown builtin */
8118 break;
8121 /* The switch statement above can drop through to cause the function
8122 to be called normally. */
8123 return expand_call (exp, target, ignore);
8126 /* Determine whether a tree node represents a call to a built-in
8127 function. If the tree T is a call to a built-in function with
8128 the right number of arguments of the appropriate types, return
8129 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8130 Otherwise the return value is END_BUILTINS. */
8132 enum built_in_function
8133 builtin_mathfn_code (const_tree t)
8135 const_tree fndecl, arg, parmlist;
8136 const_tree argtype, parmtype;
8137 const_call_expr_arg_iterator iter;
8139 if (TREE_CODE (t) != CALL_EXPR)
8140 return END_BUILTINS;
8142 fndecl = get_callee_fndecl (t);
8143 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8144 return END_BUILTINS;
8146 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8147 init_const_call_expr_arg_iterator (t, &iter);
8148 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8150 /* If a function doesn't take a variable number of arguments,
8151 the last element in the list will have type `void'. */
8152 parmtype = TREE_VALUE (parmlist);
8153 if (VOID_TYPE_P (parmtype))
8155 if (more_const_call_expr_args_p (&iter))
8156 return END_BUILTINS;
8157 return DECL_FUNCTION_CODE (fndecl);
8160 if (! more_const_call_expr_args_p (&iter))
8161 return END_BUILTINS;
8163 arg = next_const_call_expr_arg (&iter);
8164 argtype = TREE_TYPE (arg);
8166 if (SCALAR_FLOAT_TYPE_P (parmtype))
8168 if (! SCALAR_FLOAT_TYPE_P (argtype))
8169 return END_BUILTINS;
8171 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8173 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8174 return END_BUILTINS;
8176 else if (POINTER_TYPE_P (parmtype))
8178 if (! POINTER_TYPE_P (argtype))
8179 return END_BUILTINS;
8181 else if (INTEGRAL_TYPE_P (parmtype))
8183 if (! INTEGRAL_TYPE_P (argtype))
8184 return END_BUILTINS;
8186 else
8187 return END_BUILTINS;
8190 /* Variable-length argument list. */
8191 return DECL_FUNCTION_CODE (fndecl);
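/* For example, a CALL_EXPR for sqrtf (x) with X of type float yields
   BUILT_IN_SQRTF here, while sqrtf called with a pointer argument or with
   the wrong number of arguments yields END_BUILTINS.  */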
8194 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8195 evaluate to a constant. */
8197 static tree
8198 fold_builtin_constant_p (tree arg)
8200 /* We return 1 for a numeric type that's known to be a constant
8201 value at compile-time or for an aggregate type that's a
8202 literal constant. */
8203 STRIP_NOPS (arg);
8205 /* If we know this is a constant, return the constant one. */
8206 if (CONSTANT_CLASS_P (arg)
8207 || (TREE_CODE (arg) == CONSTRUCTOR
8208 && TREE_CONSTANT (arg)))
8209 return integer_one_node;
8210 if (TREE_CODE (arg) == ADDR_EXPR)
8212 tree op = TREE_OPERAND (arg, 0);
8213 if (TREE_CODE (op) == STRING_CST
8214 || (TREE_CODE (op) == ARRAY_REF
8215 && integer_zerop (TREE_OPERAND (op, 1))
8216 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8217 return integer_one_node;
8220 /* If this expression has side effects, show we don't know it to be a
8221 constant. Likewise if it's a pointer or aggregate type since in
8222 those cases we only want literals, as those are only optimized
8223 when generating RTL, not later.
8224 And finally, if we are compiling an initializer, not code, we
8225 need to return a definite result now; there's not going to be any
8226 more optimization done. */
8227 if (TREE_SIDE_EFFECTS (arg)
8228 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8229 || POINTER_TYPE_P (TREE_TYPE (arg))
8230 || cfun == 0
8231 || folding_initializer
8232 || force_folding_builtin_constant_p)
8233 return integer_zero_node;
8235 return NULL_TREE;
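/* For example, __builtin_constant_p (3 * 7) and __builtin_constant_p
   ("abc"[1]) fold to 1 here once their arguments have been folded to
   constants, while arguments with side effects or of pointer or aggregate
   type fold to 0; anything still undecided is left alone (NULL_TREE) so
   later optimization can answer it.  */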
8238 /* Create builtin_expect or builtin_expect_with_probability
8239 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8240 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8241 argument; builtin_expect_with_probability instead uses its third argument as
8242 a PROBABILITY value. */
8244 static tree
8245 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8246 tree predictor, tree probability)
8248 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8250 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8251 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8252 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8253 ret_type = TREE_TYPE (TREE_TYPE (fn));
8254 pred_type = TREE_VALUE (arg_types);
8255 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8257 pred = fold_convert_loc (loc, pred_type, pred);
8258 expected = fold_convert_loc (loc, expected_type, expected);
8260 if (probability)
8261 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8262 else
8263 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8264 predictor);
8266 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8267 build_int_cst (ret_type, 0));
8270 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8271 NULL_TREE if no simplification is possible. */
8273 tree
8274 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8275 tree arg3)
8277 tree inner, fndecl, inner_arg0;
8278 enum tree_code code;
8280 /* Distribute the expected value over short-circuiting operators.
8281 See through the cast from truthvalue_type_node to long. */
8282 inner_arg0 = arg0;
8283 while (CONVERT_EXPR_P (inner_arg0)
8284 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8285 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8286 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8288 /* If this is a builtin_expect within a builtin_expect, keep the
8289 inner one. See through a comparison against a constant. It
8290 might have been added to create a truthvalue. */
8291 inner = inner_arg0;
8293 if (COMPARISON_CLASS_P (inner)
8294 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8295 inner = TREE_OPERAND (inner, 0);
8297 if (TREE_CODE (inner) == CALL_EXPR
8298 && (fndecl = get_callee_fndecl (inner))
8299 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8300 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8301 return arg0;
8303 inner = inner_arg0;
8304 code = TREE_CODE (inner);
8305 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8307 tree op0 = TREE_OPERAND (inner, 0);
8308 tree op1 = TREE_OPERAND (inner, 1);
8309 arg1 = save_expr (arg1);
8311 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8312 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8313 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8315 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8318 /* If the argument isn't invariant then there's nothing else we can do. */
8319 if (!TREE_CONSTANT (inner_arg0))
8320 return NULL_TREE;
8322 /* If we expect that a comparison against the argument will fold to
8323 a constant return the constant. In practice, this means a true
8324 constant or the address of a non-weak symbol. */
8325 inner = inner_arg0;
8326 STRIP_NOPS (inner);
8327 if (TREE_CODE (inner) == ADDR_EXPR)
8331 inner = TREE_OPERAND (inner, 0);
8333 while (TREE_CODE (inner) == COMPONENT_REF
8334 || TREE_CODE (inner) == ARRAY_REF);
8335 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8336 return NULL_TREE;
8339 /* Otherwise, ARG0 already has the proper type for the return value. */
8340 return arg0;
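/* For instance, under the distribution above a call such as
   __builtin_expect (a && b, 0) is folded to
   (__builtin_expect (a, 0) != 0) && (__builtin_expect (b, 0) != 0),
   so each arm of the short-circuit gets its own prediction.  */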
8343 /* Fold a call to __builtin_classify_type with argument ARG. */
8345 static tree
8346 fold_builtin_classify_type (tree arg)
8348 if (arg == 0)
8349 return build_int_cst (integer_type_node, no_type_class);
8351 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8354 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8355 ARG. */
8357 static tree
8358 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8360 if (!validate_arg (arg, POINTER_TYPE))
8361 return NULL_TREE;
8362 else
8364 c_strlen_data lendata = { };
8365 tree len = c_strlen (arg, 0, &lendata);
8367 if (len)
8368 return fold_convert_loc (loc, type, len);
8370 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8371 also early enough to detect invalid reads in multidimensional
8372 arrays and struct members. */
8373 if (!lendata.decl)
8374 c_strlen (arg, 1, &lendata);
8376 if (lendata.decl)
8378 if (EXPR_HAS_LOCATION (arg))
8379 loc = EXPR_LOCATION (arg);
8380 else if (loc == UNKNOWN_LOCATION)
8381 loc = input_location;
8382 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8385 return NULL_TREE;
8389 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8391 static tree
8392 fold_builtin_inf (location_t loc, tree type, int warn)
8394 REAL_VALUE_TYPE real;
8396 /* __builtin_inff is intended to be usable to define INFINITY on all
8397 targets. If an infinity is not available, INFINITY expands "to a
8398 positive constant of type float that overflows at translation
8399 time", footnote "In this case, using INFINITY will violate the
8400 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8401 Thus we pedwarn to ensure this constraint violation is
8402 diagnosed. */
8403 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8404 pedwarn (loc, 0, "target format does not support infinity");
8406 real_inf (&real);
8407 return build_real (type, real);
8410 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8411 NULL_TREE if no simplification can be made. */
8413 static tree
8414 fold_builtin_sincos (location_t loc,
8415 tree arg0, tree arg1, tree arg2)
8417 tree type;
8418 tree fndecl, call = NULL_TREE;
8420 if (!validate_arg (arg0, REAL_TYPE)
8421 || !validate_arg (arg1, POINTER_TYPE)
8422 || !validate_arg (arg2, POINTER_TYPE))
8423 return NULL_TREE;
8425 type = TREE_TYPE (arg0);
8427 /* Canonicalize sincos to cexpi. */
8428 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8429 if (fn == END_BUILTINS)
8430 return NULL_TREE;
8432 /* Calculate the result when the argument is a constant. */
8433 if (TREE_CODE (arg0) == REAL_CST)
8435 tree complex_type = build_complex_type (type);
8436 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8438 if (!call)
8440 if (!targetm.libc_has_function (function_c99_math_complex, type)
8441 || !builtin_decl_implicit_p (fn))
8442 return NULL_TREE;
8443 fndecl = builtin_decl_explicit (fn);
8444 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8445 call = builtin_save_expr (call);
8448 tree ptype = build_pointer_type (type);
8449 arg1 = fold_convert (ptype, arg1);
8450 arg2 = fold_convert (ptype, arg2);
8451 return build2 (COMPOUND_EXPR, void_type_node,
8452 build2 (MODIFY_EXPR, void_type_node,
8453 build_fold_indirect_ref_loc (loc, arg1),
8454 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8455 build2 (MODIFY_EXPR, void_type_node,
8456 build_fold_indirect_ref_loc (loc, arg2),
8457 fold_build1_loc (loc, REALPART_EXPR, type, call)));
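/* The expression built above is equivalent to
     tmp = cexpi (arg0); *arg1 = IMAGPART (tmp); *arg2 = REALPART (tmp);
   i.e. sincos (x, sinp, cosp) stores sin (x) through SINP and cos (x)
   through COSP from a single cexpi call.  */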
8460 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8461 Return NULL_TREE if no simplification can be made. */
8463 static tree
8464 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8466 if (!validate_arg (arg1, POINTER_TYPE)
8467 || !validate_arg (arg2, POINTER_TYPE)
8468 || !validate_arg (len, INTEGER_TYPE))
8469 return NULL_TREE;
8471 /* If the LEN parameter is zero, return zero. */
8472 if (integer_zerop (len))
8473 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8474 arg1, arg2);
8476 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8477 if (operand_equal_p (arg1, arg2, 0))
8478 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8480 /* If the LEN parameter is one, return an expression corresponding to
8481 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8482 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8484 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8485 tree cst_uchar_ptr_node
8486 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8488 tree ind1
8489 = fold_convert_loc (loc, integer_type_node,
8490 build1 (INDIRECT_REF, cst_uchar_node,
8491 fold_convert_loc (loc,
8492 cst_uchar_ptr_node,
8493 arg1)));
8494 tree ind2
8495 = fold_convert_loc (loc, integer_type_node,
8496 build1 (INDIRECT_REF, cst_uchar_node,
8497 fold_convert_loc (loc,
8498 cst_uchar_ptr_node,
8499 arg2)));
8500 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8503 return NULL_TREE;
8506 /* Fold a call to builtin isascii with argument ARG. */
8508 static tree
8509 fold_builtin_isascii (location_t loc, tree arg)
8511 if (!validate_arg (arg, INTEGER_TYPE))
8512 return NULL_TREE;
8513 else
8515 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8516 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8517 build_int_cst (integer_type_node,
8518 ~ (unsigned HOST_WIDE_INT) 0x7f));
8519 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8520 arg, integer_zero_node);
8524 /* Fold a call to builtin toascii with argument ARG. */
8526 static tree
8527 fold_builtin_toascii (location_t loc, tree arg)
8529 if (!validate_arg (arg, INTEGER_TYPE))
8530 return NULL_TREE;
8532 /* Transform toascii(c) -> (c & 0x7f). */
8533 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8534 build_int_cst (integer_type_node, 0x7f));
8537 /* Fold a call to builtin isdigit with argument ARG. */
8539 static tree
8540 fold_builtin_isdigit (location_t loc, tree arg)
8542 if (!validate_arg (arg, INTEGER_TYPE))
8543 return NULL_TREE;
8544 else
8546 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8547 /* According to the C standard, isdigit is unaffected by locale.
8548 However, it definitely is affected by the target character set. */
8549 unsigned HOST_WIDE_INT target_digit0
8550 = lang_hooks.to_target_charset ('0');
8552 if (target_digit0 == 0)
8553 return NULL_TREE;
8555 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8556 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8557 build_int_cst (unsigned_type_node, target_digit0));
8558 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8559 build_int_cst (unsigned_type_node, 9));
8563 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8565 static tree
8566 fold_builtin_fabs (location_t loc, tree arg, tree type)
8568 if (!validate_arg (arg, REAL_TYPE))
8569 return NULL_TREE;
8571 arg = fold_convert_loc (loc, type, arg);
8572 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8575 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8577 static tree
8578 fold_builtin_abs (location_t loc, tree arg, tree type)
8580 if (!validate_arg (arg, INTEGER_TYPE))
8581 return NULL_TREE;
8583 arg = fold_convert_loc (loc, type, arg);
8584 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8587 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8589 static tree
8590 fold_builtin_carg (location_t loc, tree arg, tree type)
8592 if (validate_arg (arg, COMPLEX_TYPE)
8593 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8595 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8597 if (atan2_fn)
8599 tree new_arg = builtin_save_expr (arg);
8600 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8601 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8602 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8606 return NULL_TREE;
8609 /* Fold a call to builtin frexp, we can assume the base is 2. */
8611 static tree
8612 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8614 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8615 return NULL_TREE;
8617 STRIP_NOPS (arg0);
8619 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8620 return NULL_TREE;
8622 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8624 /* Proceed if a valid pointer type was passed in. */
8625 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8627 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8628 tree frac, exp;
8630 switch (value->cl)
8632 case rvc_zero:
8633 /* For +-0, return (*exp = 0, +-0). */
8634 exp = integer_zero_node;
8635 frac = arg0;
8636 break;
8637 case rvc_nan:
8638 case rvc_inf:
8639 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8640 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8641 case rvc_normal:
8643 /* Since the frexp function always expects base 2, and in
8644 GCC normalized significands are already in the range
8645 [0.5, 1.0), we have exactly what frexp wants. */
8646 REAL_VALUE_TYPE frac_rvt = *value;
8647 SET_REAL_EXP (&frac_rvt, 0);
8648 frac = build_real (rettype, frac_rvt);
8649 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8651 break;
8652 default:
8653 gcc_unreachable ();
8656 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8657 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8658 TREE_SIDE_EFFECTS (arg1) = 1;
8659 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8662 return NULL_TREE;
8665 /* Fold a call to builtin modf. */
8667 static tree
8668 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8670 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8671 return NULL_TREE;
8673 STRIP_NOPS (arg0);
8675 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8676 return NULL_TREE;
8678 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8680 /* Proceed if a valid pointer type was passed in. */
8681 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8683 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8684 REAL_VALUE_TYPE trunc, frac;
8686 switch (value->cl)
8688 case rvc_nan:
8689 case rvc_zero:
8690 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8691 trunc = frac = *value;
8692 break;
8693 case rvc_inf:
8694 /* For +-Inf, return (*arg1 = arg0, +-0). */
8695 frac = dconst0;
8696 frac.sign = value->sign;
8697 trunc = *value;
8698 break;
8699 case rvc_normal:
8700 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8701 real_trunc (&trunc, VOIDmode, value);
8702 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8703 /* If the original number was negative and already
8704 integral, then the fractional part is -0.0. */
8705 if (value->sign && frac.cl == rvc_zero)
8706 frac.sign = value->sign;
8707 break;
8710 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8711 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8712 build_real (rettype, trunc));
8713 TREE_SIDE_EFFECTS (arg1) = 1;
8714 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8715 build_real (rettype, frac));
8718 return NULL_TREE;
8721 /* Given a location LOC, an interclass builtin function decl FNDECL
8722 and its single argument ARG, return a folded expression computing
8723 the same, or NULL_TREE if we either couldn't or didn't want to fold
8724 (the latter happens if there's an RTL instruction available). */
8726 static tree
8727 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8729 machine_mode mode;
8731 if (!validate_arg (arg, REAL_TYPE))
8732 return NULL_TREE;
8734 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8735 return NULL_TREE;
8737 mode = TYPE_MODE (TREE_TYPE (arg));
8739 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8741 /* If there is no optab, try generic code. */
8742 switch (DECL_FUNCTION_CODE (fndecl))
8744 tree result;
8746 CASE_FLT_FN (BUILT_IN_ISINF):
8748 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8749 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8750 tree type = TREE_TYPE (arg);
8751 REAL_VALUE_TYPE r;
8752 char buf[128];
8754 if (is_ibm_extended)
8756 /* NaN and Inf are encoded in the high-order double value
8757 only. The low-order value is not significant. */
8758 type = double_type_node;
8759 mode = DFmode;
8760 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8762 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8763 real_from_string (&r, buf);
8764 result = build_call_expr (isgr_fn, 2,
8765 fold_build1_loc (loc, ABS_EXPR, type, arg),
8766 build_real (type, r));
8767 return result;
8769 CASE_FLT_FN (BUILT_IN_FINITE):
8770 case BUILT_IN_ISFINITE:
8772 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8773 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8774 tree type = TREE_TYPE (arg);
8775 REAL_VALUE_TYPE r;
8776 char buf[128];
8778 if (is_ibm_extended)
8780 /* NaN and Inf are encoded in the high-order double value
8781 only. The low-order value is not significant. */
8782 type = double_type_node;
8783 mode = DFmode;
8784 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8786 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8787 real_from_string (&r, buf);
8788 result = build_call_expr (isle_fn, 2,
8789 fold_build1_loc (loc, ABS_EXPR, type, arg),
8790 build_real (type, r));
8791 /*result = fold_build2_loc (loc, UNGT_EXPR,
8792 TREE_TYPE (TREE_TYPE (fndecl)),
8793 fold_build1_loc (loc, ABS_EXPR, type, arg),
8794 build_real (type, r));
8795 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8796 TREE_TYPE (TREE_TYPE (fndecl)),
8797 result);*/
8798 return result;
8800 case BUILT_IN_ISNORMAL:
8802 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8803 islessequal(fabs(x),DBL_MAX). */
8804 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8805 tree type = TREE_TYPE (arg);
8806 tree orig_arg, max_exp, min_exp;
8807 machine_mode orig_mode = mode;
8808 REAL_VALUE_TYPE rmax, rmin;
8809 char buf[128];
8811 orig_arg = arg = builtin_save_expr (arg);
8812 if (is_ibm_extended)
8814 /* Use double to test the normal range of IBM extended
8815 precision. Emin for IBM extended precision is
8816 different to emin for IEEE double, being 53 higher
8817 since the low double exponent is at least 53 lower
8818 than the high double exponent. */
8819 type = double_type_node;
8820 mode = DFmode;
8821 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8823 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8825 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8826 real_from_string (&rmax, buf);
8827 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8828 real_from_string (&rmin, buf);
8829 max_exp = build_real (type, rmax);
8830 min_exp = build_real (type, rmin);
8832 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8833 if (is_ibm_extended)
8835 /* Testing the high end of the range is done just using
8836 the high double, using the same test as isfinite().
8837 For the subnormal end of the range we first test the
8838 high double, then if its magnitude is equal to the
8839 limit of 0x1p-969, we test whether the low double is
8840 non-zero and opposite sign to the high double. */
8841 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8842 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8843 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8844 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8845 arg, min_exp);
8846 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8847 complex_double_type_node, orig_arg);
8848 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8849 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8850 tree zero = build_real (type, dconst0);
8851 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8852 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8853 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8854 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8855 fold_build3 (COND_EXPR,
8856 integer_type_node,
8857 hilt, logt, lolt));
8858 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8859 eq_min, ok_lo);
8860 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8861 gt_min, eq_min);
8863 else
8865 tree const isge_fn
8866 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8867 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8869 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8870 max_exp, min_exp);
8871 return result;
8873 default:
8874 break;
8877 return NULL_TREE;
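/* Illustrative sketch, not part of the GCC sources: what the rewrites above
   mean at the C level for 'double', where DBL_MAX and DBL_MIN stand in for
   the per-mode constants the folder derives via get_max_float () and
   0x1p(emin-1).  The helper name is made up for the example.  */
#if 0
#include <float.h>
#include <math.h>
#include <stdio.h>

static void
check (double x)
{
  int inf_folded = isgreater (fabs (x), DBL_MAX);
  int fin_folded = islessequal (fabs (x), DBL_MAX);
  int nrm_folded = isgreaterequal (fabs (x), DBL_MIN)
                   && islessequal (fabs (x), DBL_MAX);
  printf ("%g: isinf %d/%d  isfinite %d/%d  isnormal %d/%d\n", x,
          !!isinf (x), inf_folded, !!isfinite (x), fin_folded,
          !!isnormal (x), nrm_folded);
}

int
main (void)
{
  check (1.0);
  check (0.0);
  check (DBL_MIN / 4);          /* subnormal */
  check (INFINITY);
  check (NAN);
  return 0;
}
#endif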
8880 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign,
8881 __builtin_isfinite or __builtin_finite. ARG is the argument for the call;
BUILTIN_INDEX selects which classification to fold. */
8883 static tree
8884 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8886 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8888 if (!validate_arg (arg, REAL_TYPE))
8889 return NULL_TREE;
8891 switch (builtin_index)
8893 case BUILT_IN_ISINF:
8894 if (tree_expr_infinite_p (arg))
8895 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8896 if (!tree_expr_maybe_infinite_p (arg))
8897 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8898 return NULL_TREE;
8900 case BUILT_IN_ISINF_SIGN:
8902 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8903 /* In a boolean context, GCC will fold the inner COND_EXPR to
8904 1. So e.g. "if (isinf_sign(x))" would be folded to just
8905 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8906 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8907 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8908 tree tmp = NULL_TREE;
8910 arg = builtin_save_expr (arg);
8912 if (signbit_fn && isinf_fn)
8914 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8915 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8917 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8918 signbit_call, integer_zero_node);
8919 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8920 isinf_call, integer_zero_node);
8922 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8923 integer_minus_one_node, integer_one_node);
8924 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8925 isinf_call, tmp,
8926 integer_zero_node);
8929 return tmp;
8932 case BUILT_IN_ISFINITE:
8933 if (tree_expr_finite_p (arg))
8934 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8935 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8936 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8937 return NULL_TREE;
8939 case BUILT_IN_ISNAN:
8940 if (tree_expr_nan_p (arg))
8941 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8942 if (!tree_expr_maybe_nan_p (arg))
8943 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8946 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8947 if (is_ibm_extended)
8949 /* NaN and Inf are encoded in the high-order double value
8950 only. The low-order value is not significant. */
8951 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8954 arg = builtin_save_expr (arg);
8955 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8957 default:
8958 gcc_unreachable ();
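/* Illustrative sketch, not part of the GCC sources: the isinf_sign folding
   written out as ordinary C.  isinf_sign_folded is a hypothetical name.  */
#if 0
#include <math.h>
#include <stdio.h>

static int
isinf_sign_folded (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, 1.0, INFINITY, NAN };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    printf ("%g -> %d (builtin %d)\n", vals[i],
            isinf_sign_folded (vals[i]), __builtin_isinf_sign (vals[i]));
  return 0;
}
#endif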
8962 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8963 This builtin will generate code to return the appropriate floating
8964 point classification depending on the value of the floating point
8965 number passed in. The possible return values must be supplied as
8966 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8967 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8968 one floating-point argument, which is "type generic". */
8970 static tree
8971 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8973 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8974 arg, type, res, tmp;
8975 machine_mode mode;
8976 REAL_VALUE_TYPE r;
8977 char buf[128];
8979 /* Verify the required arguments in the original call. */
8980 if (nargs != 6
8981 || !validate_arg (args[0], INTEGER_TYPE)
8982 || !validate_arg (args[1], INTEGER_TYPE)
8983 || !validate_arg (args[2], INTEGER_TYPE)
8984 || !validate_arg (args[3], INTEGER_TYPE)
8985 || !validate_arg (args[4], INTEGER_TYPE)
8986 || !validate_arg (args[5], REAL_TYPE))
8987 return NULL_TREE;
8989 fp_nan = args[0];
8990 fp_infinite = args[1];
8991 fp_normal = args[2];
8992 fp_subnormal = args[3];
8993 fp_zero = args[4];
8994 arg = args[5];
8995 type = TREE_TYPE (arg);
8996 mode = TYPE_MODE (type);
8997 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8999 /* fpclassify(x) ->
9000 isnan(x) ? FP_NAN :
9001 (fabs(x) == Inf ? FP_INFINITE :
9002 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9003 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9005 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9006 build_real (type, dconst0));
9007 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9008 tmp, fp_zero, fp_subnormal);
9010 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9011 real_from_string (&r, buf);
9012 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9013 arg, build_real (type, r));
9014 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9016 if (tree_expr_maybe_infinite_p (arg))
9018 real_inf (&r);
9019 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9020 build_real (type, r));
9021 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9022 fp_infinite, res);
9025 if (tree_expr_maybe_nan_p (arg))
9027 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9028 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9031 return res;
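/* Illustrative sketch, not part of the GCC sources: the conditional chain
   built above, for 'double' (DBL_MIN standing in for 0x1p(emin-1) of the
   argument's mode).  The NaN test ends up outermost because it is added
   last.  */
#if 0
#include <float.h>
#include <math.h>
#include <stdio.h>

static int
fpclassify_folded (double x)
{
  double ax = fabs (x);
  return isnan (x) ? FP_NAN
         : ax == INFINITY ? FP_INFINITE
         : ax >= DBL_MIN ? FP_NORMAL
         : x == 0.0 ? FP_ZERO : FP_SUBNORMAL;
}

int
main (void)
{
  double vals[] = { NAN, INFINITY, 1.0, DBL_MIN / 2, 0.0 };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    printf ("%d %d\n", fpclassify_folded (vals[i]), fpclassify (vals[i]));
  return 0;
}
#endif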
9034 /* Fold a call to an unordered comparison function such as
9035 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9036 being called and ARG0 and ARG1 are the arguments for the call.
9037 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9038 the opposite of the desired result. UNORDERED_CODE is used
9039 for modes that can hold NaNs and ORDERED_CODE is used for
9040 the rest. */
9042 static tree
9043 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9044 enum tree_code unordered_code,
9045 enum tree_code ordered_code)
9047 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9048 enum tree_code code;
9049 tree type0, type1;
9050 enum tree_code code0, code1;
9051 tree cmp_type = NULL_TREE;
9053 type0 = TREE_TYPE (arg0);
9054 type1 = TREE_TYPE (arg1);
9056 code0 = TREE_CODE (type0);
9057 code1 = TREE_CODE (type1);
9059 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9060 /* Choose the wider of two real types. */
9061 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9062 ? type0 : type1;
9063 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9064 cmp_type = type0;
9065 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9066 cmp_type = type1;
9068 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9069 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9071 if (unordered_code == UNORDERED_EXPR)
9073 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9074 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9075 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9076 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9077 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9080 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9081 ? unordered_code : ordered_code;
9082 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9083 fold_build2_loc (loc, code, type, arg0, arg1));
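/* Illustrative sketch, not part of the GCC sources: the UNLE_EXPR-based
   rewrite of __builtin_isgreater spelled out with the C99 classification
   macros ("UNLE (x, y)" meaning "unordered, or x <= y").  Exception flags
   are ignored here.  */
#if 0
#include <math.h>
#include <stdio.h>

static int
isgreater_folded (double x, double y)
{
  return !(isunordered (x, y) || x <= y);       /* !UNLE (x, y) */
}

int
main (void)
{
  double vals[] = { -1.0, 0.0, 2.0, NAN };
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 4; j++)
      if (isgreater_folded (vals[i], vals[j]) != isgreater (vals[i], vals[j]))
        printf ("mismatch at %g, %g\n", vals[i], vals[j]);
  return 0;
}
#endif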
9086 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9087 arithmetic if it can never overflow, or into internal functions that
9088 return both the result of the arithmetic and an overflow flag in
9089 a complex integer result, or some other check for overflow.
9090 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9091 checking part of that. */
9093 static tree
9094 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9095 tree arg0, tree arg1, tree arg2)
9097 enum internal_fn ifn = IFN_LAST;
9098 /* The code of the expression corresponding to the built-in. */
9099 enum tree_code opcode = ERROR_MARK;
9100 bool ovf_only = false;
9102 switch (fcode)
9104 case BUILT_IN_ADD_OVERFLOW_P:
9105 ovf_only = true;
9106 /* FALLTHRU */
9107 case BUILT_IN_ADD_OVERFLOW:
9108 case BUILT_IN_SADD_OVERFLOW:
9109 case BUILT_IN_SADDL_OVERFLOW:
9110 case BUILT_IN_SADDLL_OVERFLOW:
9111 case BUILT_IN_UADD_OVERFLOW:
9112 case BUILT_IN_UADDL_OVERFLOW:
9113 case BUILT_IN_UADDLL_OVERFLOW:
9114 opcode = PLUS_EXPR;
9115 ifn = IFN_ADD_OVERFLOW;
9116 break;
9117 case BUILT_IN_SUB_OVERFLOW_P:
9118 ovf_only = true;
9119 /* FALLTHRU */
9120 case BUILT_IN_SUB_OVERFLOW:
9121 case BUILT_IN_SSUB_OVERFLOW:
9122 case BUILT_IN_SSUBL_OVERFLOW:
9123 case BUILT_IN_SSUBLL_OVERFLOW:
9124 case BUILT_IN_USUB_OVERFLOW:
9125 case BUILT_IN_USUBL_OVERFLOW:
9126 case BUILT_IN_USUBLL_OVERFLOW:
9127 opcode = MINUS_EXPR;
9128 ifn = IFN_SUB_OVERFLOW;
9129 break;
9130 case BUILT_IN_MUL_OVERFLOW_P:
9131 ovf_only = true;
9132 /* FALLTHRU */
9133 case BUILT_IN_MUL_OVERFLOW:
9134 case BUILT_IN_SMUL_OVERFLOW:
9135 case BUILT_IN_SMULL_OVERFLOW:
9136 case BUILT_IN_SMULLL_OVERFLOW:
9137 case BUILT_IN_UMUL_OVERFLOW:
9138 case BUILT_IN_UMULL_OVERFLOW:
9139 case BUILT_IN_UMULLL_OVERFLOW:
9140 opcode = MULT_EXPR;
9141 ifn = IFN_MUL_OVERFLOW;
9142 break;
9143 default:
9144 gcc_unreachable ();
9147 /* For the "generic" overloads, the first two arguments can have different
9148 types and the last argument determines the target type to use to check
9149 for overflow. The arguments of the other overloads all have the same
9150 type. */
9151 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9153 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9154 arguments are constant, attempt to fold the built-in call into a constant
9155 expression indicating whether or not it detected an overflow. */
9156 if (ovf_only
9157 && TREE_CODE (arg0) == INTEGER_CST
9158 && TREE_CODE (arg1) == INTEGER_CST)
9159 /* Perform the computation in the target type and check for overflow. */
9160 return omit_one_operand_loc (loc, boolean_type_node,
9161 arith_overflowed_p (opcode, type, arg0, arg1)
9162 ? boolean_true_node : boolean_false_node,
9163 arg2);
9165 tree intres, ovfres;
9166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9168 intres = fold_binary_loc (loc, opcode, type,
9169 fold_convert_loc (loc, type, arg0),
9170 fold_convert_loc (loc, type, arg1));
9171 if (TREE_OVERFLOW (intres))
9172 intres = drop_tree_overflow (intres);
9173 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9174 ? boolean_true_node : boolean_false_node);
9176 else
9178 tree ctype = build_complex_type (type);
9179 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9180 arg0, arg1);
9181 tree tgt = save_expr (call);
9182 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9183 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9184 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9187 if (ovf_only)
9188 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9190 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9191 tree store
9192 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9193 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
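/* Illustrative sketch, not part of the GCC sources: how the overflow
   builtins behave from the caller's side.  The IMAGPART of the internal
   call becomes the returned flag; the REALPART is what gets stored.  */
#if 0
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  int sum;
  if (__builtin_add_overflow (INT_MAX, 1, &sum))
    printf ("overflowed, wrapped result %d\n", sum);
  /* The _p variant only asks the question; nothing is stored.  The third
     argument merely supplies the type to check against.  */
  if (__builtin_add_overflow_p (INT_MAX, 1, (int) 0))
    printf ("overflow predicted\n");
  return 0;
}
#endif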
9196 /* Fold a call to __builtin_FILE to a constant string. */
9198 static inline tree
9199 fold_builtin_FILE (location_t loc)
9201 if (const char *fname = LOCATION_FILE (loc))
9203 /* The documentation says this builtin is equivalent to the preprocessor
9204 __FILE__ macro so it appears appropriate to use the same file prefix
9205 mappings. */
9206 fname = remap_macro_filename (fname);
9207 return build_string_literal (strlen (fname) + 1, fname);
9210 return build_string_literal (1, "");
9213 /* Fold a call to __builtin_FUNCTION to a constant string. */
9215 static inline tree
9216 fold_builtin_FUNCTION ()
9218 const char *name = "";
9220 if (current_function_decl)
9221 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9223 return build_string_literal (strlen (name) + 1, name);
9226 /* Fold a call to __builtin_LINE to an integer constant. */
9228 static inline tree
9229 fold_builtin_LINE (location_t loc, tree type)
9231 return build_int_cst (type, LOCATION_LINE (loc));
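/* Illustrative sketch, not part of the GCC sources: the three location
   builtins fold to compile-time constants describing the call site.  */
#if 0
#include <stdio.h>

static void
where (const char *file, const char *func, int line)
{
  printf ("%s:%d in %s\n", file, line, func);
}

int
main (void)
{
  where (__builtin_FILE (), __builtin_FUNCTION (), __builtin_LINE ());
  return 0;
}
#endif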
9234 /* Fold a call to built-in function FNDECL with 0 arguments.
9235 This function returns NULL_TREE if no simplification was possible. */
9237 static tree
9238 fold_builtin_0 (location_t loc, tree fndecl)
9240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9242 switch (fcode)
9244 case BUILT_IN_FILE:
9245 return fold_builtin_FILE (loc);
9247 case BUILT_IN_FUNCTION:
9248 return fold_builtin_FUNCTION ();
9250 case BUILT_IN_LINE:
9251 return fold_builtin_LINE (loc, type);
9253 CASE_FLT_FN (BUILT_IN_INF):
9254 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9255 case BUILT_IN_INFD32:
9256 case BUILT_IN_INFD64:
9257 case BUILT_IN_INFD128:
9258 return fold_builtin_inf (loc, type, true);
9260 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9261 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9262 return fold_builtin_inf (loc, type, false);
9264 case BUILT_IN_CLASSIFY_TYPE:
9265 return fold_builtin_classify_type (NULL_TREE);
9267 case BUILT_IN_UNREACHABLE:
9268 /* Rewrite any explicit calls to __builtin_unreachable. */
9269 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
9270 return build_builtin_unreachable (loc);
9271 break;
9273 default:
9274 break;
9276 return NULL_TREE;
9279 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9280 This function returns NULL_TREE if no simplification was possible. */
9282 static tree
9283 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9285 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9286 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9288 if (TREE_CODE (arg0) == ERROR_MARK)
9289 return NULL_TREE;
9291 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9292 return ret;
9294 switch (fcode)
9296 case BUILT_IN_CONSTANT_P:
9298 tree val = fold_builtin_constant_p (arg0);
9300 /* Gimplification will pull the CALL_EXPR for the builtin out of
9301 an if condition. When not optimizing, we'll not CSE it back.
9302 To avoid link-error type regressions, return false now. */
9303 if (!val && !optimize)
9304 val = integer_zero_node;
9306 return val;
9309 case BUILT_IN_CLASSIFY_TYPE:
9310 return fold_builtin_classify_type (arg0);
9312 case BUILT_IN_STRLEN:
9313 return fold_builtin_strlen (loc, expr, type, arg0);
9315 CASE_FLT_FN (BUILT_IN_FABS):
9316 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9317 case BUILT_IN_FABSD32:
9318 case BUILT_IN_FABSD64:
9319 case BUILT_IN_FABSD128:
9320 return fold_builtin_fabs (loc, arg0, type);
9322 case BUILT_IN_ABS:
9323 case BUILT_IN_LABS:
9324 case BUILT_IN_LLABS:
9325 case BUILT_IN_IMAXABS:
9326 return fold_builtin_abs (loc, arg0, type);
9328 CASE_FLT_FN (BUILT_IN_CONJ):
9329 if (validate_arg (arg0, COMPLEX_TYPE)
9330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9331 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9332 break;
9334 CASE_FLT_FN (BUILT_IN_CREAL):
9335 if (validate_arg (arg0, COMPLEX_TYPE)
9336 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9337 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9338 break;
9340 CASE_FLT_FN (BUILT_IN_CIMAG):
9341 if (validate_arg (arg0, COMPLEX_TYPE)
9342 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9343 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9344 break;
9346 CASE_FLT_FN (BUILT_IN_CARG):
9347 return fold_builtin_carg (loc, arg0, type);
9349 case BUILT_IN_ISASCII:
9350 return fold_builtin_isascii (loc, arg0);
9352 case BUILT_IN_TOASCII:
9353 return fold_builtin_toascii (loc, arg0);
9355 case BUILT_IN_ISDIGIT:
9356 return fold_builtin_isdigit (loc, arg0);
9358 CASE_FLT_FN (BUILT_IN_FINITE):
9359 case BUILT_IN_FINITED32:
9360 case BUILT_IN_FINITED64:
9361 case BUILT_IN_FINITED128:
9362 case BUILT_IN_ISFINITE:
9364 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9365 if (ret)
9366 return ret;
9367 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9370 CASE_FLT_FN (BUILT_IN_ISINF):
9371 case BUILT_IN_ISINFD32:
9372 case BUILT_IN_ISINFD64:
9373 case BUILT_IN_ISINFD128:
9375 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9376 if (ret)
9377 return ret;
9378 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9381 case BUILT_IN_ISNORMAL:
9382 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9384 case BUILT_IN_ISINF_SIGN:
9385 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9387 CASE_FLT_FN (BUILT_IN_ISNAN):
9388 case BUILT_IN_ISNAND32:
9389 case BUILT_IN_ISNAND64:
9390 case BUILT_IN_ISNAND128:
9391 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9393 case BUILT_IN_FREE:
9394 if (integer_zerop (arg0))
9395 return build_empty_stmt (loc);
9396 break;
9398 default:
9399 break;
9402 return NULL_TREE;
9406 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9407 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9408 if no simplification was possible. */
9410 static tree
9411 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9413 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9414 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9416 if (TREE_CODE (arg0) == ERROR_MARK
9417 || TREE_CODE (arg1) == ERROR_MARK)
9418 return NULL_TREE;
9420 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9421 return ret;
9423 switch (fcode)
9425 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9426 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9427 if (validate_arg (arg0, REAL_TYPE)
9428 && validate_arg (arg1, POINTER_TYPE))
9429 return do_mpfr_lgamma_r (arg0, arg1, type);
9430 break;
9432 CASE_FLT_FN (BUILT_IN_FREXP):
9433 return fold_builtin_frexp (loc, arg0, arg1, type);
9435 CASE_FLT_FN (BUILT_IN_MODF):
9436 return fold_builtin_modf (loc, arg0, arg1, type);
9438 case BUILT_IN_STRSPN:
9439 return fold_builtin_strspn (loc, expr, arg0, arg1);
9441 case BUILT_IN_STRCSPN:
9442 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9444 case BUILT_IN_STRPBRK:
9445 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9447 case BUILT_IN_EXPECT:
9448 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9450 case BUILT_IN_ISGREATER:
9451 return fold_builtin_unordered_cmp (loc, fndecl,
9452 arg0, arg1, UNLE_EXPR, LE_EXPR);
9453 case BUILT_IN_ISGREATEREQUAL:
9454 return fold_builtin_unordered_cmp (loc, fndecl,
9455 arg0, arg1, UNLT_EXPR, LT_EXPR);
9456 case BUILT_IN_ISLESS:
9457 return fold_builtin_unordered_cmp (loc, fndecl,
9458 arg0, arg1, UNGE_EXPR, GE_EXPR);
9459 case BUILT_IN_ISLESSEQUAL:
9460 return fold_builtin_unordered_cmp (loc, fndecl,
9461 arg0, arg1, UNGT_EXPR, GT_EXPR);
9462 case BUILT_IN_ISLESSGREATER:
9463 return fold_builtin_unordered_cmp (loc, fndecl,
9464 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9465 case BUILT_IN_ISUNORDERED:
9466 return fold_builtin_unordered_cmp (loc, fndecl,
9467 arg0, arg1, UNORDERED_EXPR,
9468 NOP_EXPR);
9470 /* We do the folding for va_start in the expander. */
9471 case BUILT_IN_VA_START:
9472 break;
9474 case BUILT_IN_OBJECT_SIZE:
9475 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
9476 return fold_builtin_object_size (arg0, arg1, fcode);
9478 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9479 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9481 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9482 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9484 default:
9485 break;
9487 return NULL_TREE;
9490 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9491 and ARG2.
9492 This function returns NULL_TREE if no simplification was possible. */
9494 static tree
9495 fold_builtin_3 (location_t loc, tree fndecl,
9496 tree arg0, tree arg1, tree arg2)
9498 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9499 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9501 if (TREE_CODE (arg0) == ERROR_MARK
9502 || TREE_CODE (arg1) == ERROR_MARK
9503 || TREE_CODE (arg2) == ERROR_MARK)
9504 return NULL_TREE;
9506 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9507 arg0, arg1, arg2))
9508 return ret;
9510 switch (fcode)
9513 CASE_FLT_FN (BUILT_IN_SINCOS):
9514 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9516 CASE_FLT_FN (BUILT_IN_REMQUO):
9517 if (validate_arg (arg0, REAL_TYPE)
9518 && validate_arg (arg1, REAL_TYPE)
9519 && validate_arg (arg2, POINTER_TYPE))
9520 return do_mpfr_remquo (arg0, arg1, arg2);
9521 break;
9523 case BUILT_IN_MEMCMP:
9524 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9526 case BUILT_IN_EXPECT:
9527 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9529 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9530 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9532 case BUILT_IN_ADD_OVERFLOW:
9533 case BUILT_IN_SUB_OVERFLOW:
9534 case BUILT_IN_MUL_OVERFLOW:
9535 case BUILT_IN_ADD_OVERFLOW_P:
9536 case BUILT_IN_SUB_OVERFLOW_P:
9537 case BUILT_IN_MUL_OVERFLOW_P:
9538 case BUILT_IN_SADD_OVERFLOW:
9539 case BUILT_IN_SADDL_OVERFLOW:
9540 case BUILT_IN_SADDLL_OVERFLOW:
9541 case BUILT_IN_SSUB_OVERFLOW:
9542 case BUILT_IN_SSUBL_OVERFLOW:
9543 case BUILT_IN_SSUBLL_OVERFLOW:
9544 case BUILT_IN_SMUL_OVERFLOW:
9545 case BUILT_IN_SMULL_OVERFLOW:
9546 case BUILT_IN_SMULLL_OVERFLOW:
9547 case BUILT_IN_UADD_OVERFLOW:
9548 case BUILT_IN_UADDL_OVERFLOW:
9549 case BUILT_IN_UADDLL_OVERFLOW:
9550 case BUILT_IN_USUB_OVERFLOW:
9551 case BUILT_IN_USUBL_OVERFLOW:
9552 case BUILT_IN_USUBLL_OVERFLOW:
9553 case BUILT_IN_UMUL_OVERFLOW:
9554 case BUILT_IN_UMULL_OVERFLOW:
9555 case BUILT_IN_UMULLL_OVERFLOW:
9556 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9558 default:
9559 break;
9561 return NULL_TREE;
9564 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9565 ARGS is an array of NARGS arguments. IGNORE is true if the result
9566 of the function call is ignored. This function returns NULL_TREE
9567 if no simplification was possible. */
9569 static tree
9570 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9571 int nargs, bool)
9573 tree ret = NULL_TREE;
9575 switch (nargs)
9577 case 0:
9578 ret = fold_builtin_0 (loc, fndecl);
9579 break;
9580 case 1:
9581 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9582 break;
9583 case 2:
9584 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9585 break;
9586 case 3:
9587 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9588 break;
9589 default:
9590 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9591 break;
9593 if (ret)
9595 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9596 SET_EXPR_LOCATION (ret, loc);
9597 return ret;
9599 return NULL_TREE;
9602 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9603 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9604 of arguments in ARGS to be omitted. OLDNARGS is the number of
9605 elements in ARGS. */
9607 static tree
9608 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9609 int skip, tree fndecl, int n, va_list newargs)
9611 int nargs = oldnargs - skip + n;
9612 tree *buffer;
9614 if (n > 0)
9616 int i, j;
9618 buffer = XALLOCAVEC (tree, nargs);
9619 for (i = 0; i < n; i++)
9620 buffer[i] = va_arg (newargs, tree);
9621 for (j = skip; j < oldnargs; j++, i++)
9622 buffer[i] = args[j];
9624 else
9625 buffer = args + skip;
9627 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9630 /* Return true if FNDECL shouldn't be folded right now.
9631 If a built-in function has an inline attribute always_inline
9632 wrapper, defer folding it until after always_inline functions have
9633 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9634 might not be performed. */
9636 bool
9637 avoid_folding_inline_builtin (tree fndecl)
9639 return (DECL_DECLARED_INLINE_P (fndecl)
9640 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9641 && cfun
9642 && !cfun->always_inline_functions_inlined
9643 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
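/* Illustrative sketch, not part of the GCC sources: the kind of
   always_inline wrapper the deferral above protects.  Folding the wrapped
   call before the wrapper is inlined would lose the object-size check.
   This is a simplified stand-in for the glibc _FORTIFY_SOURCE headers;
   memcpy_fortified is a made-up name.  */
#if 0
#include <string.h>

extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) void *
memcpy_fortified (void *dest, const void *src, size_t n)
{
  return __builtin___memcpy_chk (dest, src, n,
                                 __builtin_object_size (dest, 0));
}
#endif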
9646 /* A wrapper function for builtin folding that prevents warnings for
9647 "statement without effect" and the like, caused by removing the
9648 call node earlier than the warning is generated. */
9650 tree
9651 fold_call_expr (location_t loc, tree exp, bool ignore)
9653 tree ret = NULL_TREE;
9654 tree fndecl = get_callee_fndecl (exp);
9655 if (fndecl && fndecl_built_in_p (fndecl)
9656 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9657 yet. Defer folding until we see all the arguments
9658 (after inlining). */
9659 && !CALL_EXPR_VA_ARG_PACK (exp))
9661 int nargs = call_expr_nargs (exp);
9663 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9664 instead the last argument is __builtin_va_arg_pack (). Defer folding
9665 even in that case, until arguments are finalized. */
9666 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9668 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9669 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9670 return NULL_TREE;
9673 if (avoid_folding_inline_builtin (fndecl))
9674 return NULL_TREE;
9676 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9677 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9678 CALL_EXPR_ARGP (exp), ignore);
9679 else
9681 tree *args = CALL_EXPR_ARGP (exp);
9682 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9683 if (ret)
9684 return ret;
9687 return NULL_TREE;
9690 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9691 N arguments are passed in the array ARGARRAY. Return a folded
9692 expression or NULL_TREE if no simplification was possible. */
9694 tree
9695 fold_builtin_call_array (location_t loc, tree,
9696 tree fn,
9697 int n,
9698 tree *argarray)
9700 if (TREE_CODE (fn) != ADDR_EXPR)
9701 return NULL_TREE;
9703 tree fndecl = TREE_OPERAND (fn, 0);
9704 if (TREE_CODE (fndecl) == FUNCTION_DECL
9705 && fndecl_built_in_p (fndecl))
9707 /* If last argument is __builtin_va_arg_pack (), arguments to this
9708 function are not finalized yet. Defer folding until they are. */
9709 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9711 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9712 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9713 return NULL_TREE;
9715 if (avoid_folding_inline_builtin (fndecl))
9716 return NULL_TREE;
9717 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9718 return targetm.fold_builtin (fndecl, n, argarray, false);
9719 else
9720 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9723 return NULL_TREE;
9726 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9727 along with N new arguments specified as the "..." parameters. SKIP
9728 is the number of arguments in EXP to be omitted. This function is used
9729 to do varargs-to-varargs transformations. */
9731 static tree
9732 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9734 va_list ap;
9735 tree t;
9737 va_start (ap, n);
9738 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9739 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9740 va_end (ap);
9742 return t;
9745 /* Validate a single argument ARG against a tree code CODE representing
9746 a type. Return true when argument is valid. */
9748 static bool
9749 validate_arg (const_tree arg, enum tree_code code)
9751 if (!arg)
9752 return false;
9753 else if (code == POINTER_TYPE)
9754 return POINTER_TYPE_P (TREE_TYPE (arg));
9755 else if (code == INTEGER_TYPE)
9756 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9757 return code == TREE_CODE (TREE_TYPE (arg));
9760 /* This function validates the types of a function call argument list
9761 against a specified list of tree_codes. If the last specifier is a 0,
9762 that represents an ellipses, otherwise the last specifier must be a
9763 VOID_TYPE.
9765 This is the GIMPLE version of validate_arglist. Eventually we want to
9766 completely convert builtins.cc to work from GIMPLEs and the tree based
9767 validate_arglist will then be removed. */
9769 bool
9770 validate_gimple_arglist (const gcall *call, ...)
9772 enum tree_code code;
9773 bool res = 0;
9774 va_list ap;
9775 const_tree arg;
9776 size_t i;
9778 va_start (ap, call);
9779 i = 0;
9783 code = (enum tree_code) va_arg (ap, int);
9784 switch (code)
9786 case 0:
9787 /* This signifies an ellipsis; any further arguments are all OK. */
9788 res = true;
9789 goto end;
9790 case VOID_TYPE:
9791 /* This signifies an endlink; if no arguments remain, return
9792 true, otherwise return false. */
9793 res = (i == gimple_call_num_args (call));
9794 goto end;
9795 default:
9796 /* If no parameters remain or the parameter's code does not
9797 match the specified code, return false. Otherwise continue
9798 checking any remaining arguments. */
9799 arg = gimple_call_arg (call, i++);
9800 if (!validate_arg (arg, code))
9801 goto end;
9802 break;
9805 while (1);
9807 /* We need gotos here since we can only have one VA_CLOSE in a
9808 function. */
9809 end: ;
9810 va_end (ap);
9812 return res;
9815 /* Default target-specific builtin expander that does nothing. */
9817 rtx
9818 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9819 rtx target ATTRIBUTE_UNUSED,
9820 rtx subtarget ATTRIBUTE_UNUSED,
9821 machine_mode mode ATTRIBUTE_UNUSED,
9822 int ignore ATTRIBUTE_UNUSED)
9824 return NULL_RTX;
9827 /* Returns true if EXP represents data that would potentially reside
9828 in a readonly section. */
9830 bool
9831 readonly_data_expr (tree exp)
9833 STRIP_NOPS (exp);
9835 if (TREE_CODE (exp) != ADDR_EXPR)
9836 return false;
9838 exp = get_base_address (TREE_OPERAND (exp, 0));
9839 if (!exp)
9840 return false;
9842 /* Make sure we call decl_readonly_section only for trees it
9843 can handle (since it returns true for everything it doesn't
9844 understand). */
9845 if (TREE_CODE (exp) == STRING_CST
9846 || TREE_CODE (exp) == CONSTRUCTOR
9847 || (VAR_P (exp) && TREE_STATIC (exp)))
9848 return decl_readonly_section (exp, 0);
9849 else
9850 return false;
9853 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9854 to the call, and TYPE is its return type.
9856 Return NULL_TREE if no simplification was possible, otherwise return the
9857 simplified form of the call as a tree.
9859 The simplified form may be a constant or other expression which
9860 computes the same value, but in a more efficient manner (including
9861 calls to other builtin functions).
9863 The call may contain arguments which need to be evaluated, but
9864 which are not useful to determine the result of the call. In
9865 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9866 COMPOUND_EXPR will be an argument which must be evaluated.
9867 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9868 COMPOUND_EXPR in the chain will contain the tree for the simplified
9869 form of the builtin function call. */
9871 static tree
9872 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9874 if (!validate_arg (s1, POINTER_TYPE)
9875 || !validate_arg (s2, POINTER_TYPE))
9876 return NULL_TREE;
9878 tree fn;
9879 const char *p1, *p2;
9881 p2 = c_getstr (s2);
9882 if (p2 == NULL)
9883 return NULL_TREE;
9885 p1 = c_getstr (s1);
9886 if (p1 != NULL)
9888 const char *r = strpbrk (p1, p2);
9889 tree tem;
9891 if (r == NULL)
9892 return build_int_cst (TREE_TYPE (s1), 0);
9894 /* Return an offset into the constant string argument. */
9895 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9896 return fold_convert_loc (loc, type, tem);
9899 if (p2[0] == '\0')
9900 /* strpbrk(x, "") == NULL.
9901 Evaluate and ignore s1 in case it had side-effects. */
9902 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9904 if (p2[1] != '\0')
9905 return NULL_TREE; /* Really call strpbrk. */
9907 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9908 if (!fn)
9909 return NULL_TREE;
9911 /* New argument list transforming strpbrk(s1, s2) to
9912 strchr(s1, s2[0]). */
9913 return build_call_expr_loc (loc, fn, 2, s1,
9914 build_int_cst (integer_type_node, p2[0]));
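/* Illustrative sketch, not part of the GCC sources: the user-visible effect
   of the strpbrk folds above.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char *s = "a/b";
  /* An empty accept set folds to a plain null pointer (after evaluating S
     for side effects).  */
  printf ("%p\n", (void *) strpbrk (s, ""));
  /* A single-character accept set folds to strchr; both print "/b".  */
  printf ("%s %s\n", strpbrk (s, "/"), strchr (s, '/'));
  return 0;
}
#endif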
9917 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9918 to the call.
9920 Return NULL_TREE if no simplification was possible, otherwise return the
9921 simplified form of the call as a tree.
9923 The simplified form may be a constant or other expression which
9924 computes the same value, but in a more efficient manner (including
9925 calls to other builtin functions).
9927 The call may contain arguments which need to be evaluated, but
9928 which are not useful to determine the result of the call. In
9929 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9930 COMPOUND_EXPR will be an argument which must be evaluated.
9931 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9932 COMPOUND_EXPR in the chain will contain the tree for the simplified
9933 form of the builtin function call. */
9935 static tree
9936 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9938 if (!validate_arg (s1, POINTER_TYPE)
9939 || !validate_arg (s2, POINTER_TYPE))
9940 return NULL_TREE;
9942 if (!check_nul_terminated_array (expr, s1)
9943 || !check_nul_terminated_array (expr, s2))
9944 return NULL_TREE;
9946 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9948 /* If either argument is "", return NULL_TREE. */
9949 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9950 /* Evaluate and ignore both arguments in case either one has
9951 side-effects. */
9952 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9953 s1, s2);
9954 return NULL_TREE;
9957 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9958 to the call.
9960 Return NULL_TREE if no simplification was possible, otherwise return the
9961 simplified form of the call as a tree.
9963 The simplified form may be a constant or other expression which
9964 computes the same value, but in a more efficient manner (including
9965 calls to other builtin functions).
9967 The call may contain arguments which need to be evaluated, but
9968 which are not useful to determine the result of the call. In
9969 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9970 COMPOUND_EXPR will be an argument which must be evaluated.
9971 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9972 COMPOUND_EXPR in the chain will contain the tree for the simplified
9973 form of the builtin function call. */
9975 static tree
9976 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9978 if (!validate_arg (s1, POINTER_TYPE)
9979 || !validate_arg (s2, POINTER_TYPE))
9980 return NULL_TREE;
9982 if (!check_nul_terminated_array (expr, s1)
9983 || !check_nul_terminated_array (expr, s2))
9984 return NULL_TREE;
9986 /* If the first argument is "", return NULL_TREE. */
9987 const char *p1 = c_getstr (s1);
9988 if (p1 && *p1 == '\0')
9990 /* Evaluate and ignore argument s2 in case it has
9991 side-effects. */
9992 return omit_one_operand_loc (loc, size_type_node,
9993 size_zero_node, s2);
9996 /* If the second argument is "", return __builtin_strlen(s1). */
9997 const char *p2 = c_getstr (s2);
9998 if (p2 && *p2 == '\0')
10000 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10002 /* If the replacement _DECL isn't initialized, don't do the
10003 transformation. */
10004 if (!fn)
10005 return NULL_TREE;
10007 return build_call_expr_loc (loc, fn, 1, s1);
10009 return NULL_TREE;
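/* Illustrative sketch, not part of the GCC sources: the empty-string cases
   folded by fold_builtin_strspn and fold_builtin_strcspn above.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* strspn with either argument "" folds to 0.  */
  printf ("%zu %zu\n", strspn ("abc", ""), strspn ("", "abc"));
  /* strcspn ("", s2) folds to 0; strcspn (s1, "") folds to strlen (s1).  */
  printf ("%zu %zu\n", strcspn ("", "abc"), strcspn ("abc", ""));
  return 0;
}
#endif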
10012 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10013 produced, false otherwise. This is done so that we don't output the error
10014 or warning two or three times. */
10016 bool
10017 fold_builtin_next_arg (tree exp, bool va_start_p)
10019 tree fntype = TREE_TYPE (current_function_decl);
10020 int nargs = call_expr_nargs (exp);
10021 tree arg;
10022 /* There is a good chance the current input_location points inside the
10023 definition of the va_start macro (perhaps on the token for
10024 builtin) in a system header, so warnings will not be emitted.
10025 Use the location in real source code. */
10026 location_t current_location =
10027 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10028 NULL);
10030 if (!stdarg_p (fntype))
10032 error ("%<va_start%> used in function with fixed arguments");
10033 return true;
10036 if (va_start_p)
10038 if (va_start_p && (nargs != 2))
10040 error ("wrong number of arguments to function %<va_start%>");
10041 return true;
10043 arg = CALL_EXPR_ARG (exp, 1);
10045 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10046 once we have checked the arguments and, if needed, issued a warning. */
10047 else
10049 if (nargs == 0)
10051 /* Evidently an out of date version of <stdarg.h>; can't validate
10052 va_start's second argument, but can still work as intended. */
10053 warning_at (current_location,
10054 OPT_Wvarargs,
10055 "%<__builtin_next_arg%> called without an argument");
10056 return true;
10058 else if (nargs > 1)
10060 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10061 return true;
10063 arg = CALL_EXPR_ARG (exp, 0);
10066 if (TREE_CODE (arg) == SSA_NAME
10067 && SSA_NAME_VAR (arg))
10068 arg = SSA_NAME_VAR (arg);
10070 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10071 or __builtin_next_arg (0) the first time we see it, after checking
10072 the arguments and if needed issuing a warning. */
10073 if (!integer_zerop (arg))
10075 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10077 /* Strip off all nops for the sake of the comparison. This
10078 is not quite the same as STRIP_NOPS. It does more.
10079 We must also strip off INDIRECT_EXPR for C++ reference
10080 parameters. */
10081 while (CONVERT_EXPR_P (arg)
10082 || TREE_CODE (arg) == INDIRECT_REF)
10083 arg = TREE_OPERAND (arg, 0);
10084 if (arg != last_parm)
10086 /* FIXME: Sometimes with the tree optimizers we can get something
10087 other than the last argument even though the user used the last
10088 argument. We just warn here, so wrong code may be generated
10089 because of it. */
10091 warning_at (current_location,
10092 OPT_Wvarargs,
10093 "second parameter of %<va_start%> not last named argument");
10096 /* Undefined by C99 7.15.1.4p4 (va_start):
10097 "If the parameter parmN is declared with the register storage
10098 class, with a function or array type, or with a type that is
10099 not compatible with the type that results after application of
10100 the default argument promotions, the behavior is undefined."
10102 else if (DECL_REGISTER (arg))
10104 warning_at (current_location,
10105 OPT_Wvarargs,
10106 "undefined behavior when second parameter of "
10107 "%<va_start%> is declared with %<register%> storage");
10110 /* We want to verify the second parameter just once before the tree
10111 optimizers are run and then avoid keeping it in the tree,
10112 as otherwise we could warn even for correct code like:
10113 void foo (int i, ...)
10114 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10115 if (va_start_p)
10116 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10117 else
10118 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10120 return false;
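/* Illustrative sketch, not part of the GCC sources: calls that trip the
   checks above (both are expected to produce -Wvarargs diagnostics; exact
   wording may differ between versions).  */
#if 0
#include <stdarg.h>

void
not_last_named (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);     /* warning: second parameter of 'va_start'
                           not last named argument */
  va_end (ap);
}

void
register_parm (int a, register int b, ...)
{
  va_list ap;
  va_start (ap, b);     /* warning: undefined behavior when second parameter
                           of 'va_start' is declared with 'register' storage */
  va_end (ap);
}
#endif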
10124 /* Expand a call EXP to __builtin_object_size. */
10126 static rtx
10127 expand_builtin_object_size (tree exp)
10129 tree ost;
10130 int object_size_type;
10131 tree fndecl = get_callee_fndecl (exp);
10133 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10135 error ("first argument of %qD must be a pointer, second integer constant",
10136 fndecl);
10137 expand_builtin_trap ();
10138 return const0_rtx;
10141 ost = CALL_EXPR_ARG (exp, 1);
10142 STRIP_NOPS (ost);
10144 if (TREE_CODE (ost) != INTEGER_CST
10145 || tree_int_cst_sgn (ost) < 0
10146 || compare_tree_int (ost, 3) > 0)
10148 error ("last argument of %qD is not integer constant between 0 and 3",
10149 fndecl);
10150 expand_builtin_trap ();
10151 return const0_rtx;
10154 object_size_type = tree_to_shwi (ost);
10156 return object_size_type < 2 ? constm1_rtx : const0_rtx;
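/* Illustrative sketch, not part of the GCC sources: what the fallback above
   means to a caller, compiled with optimization enabled.  */
#if 0
#include <stdio.h>

char buf[32];

int
main (int argc, char **argv)
{
  char *p = argv[0];            /* points to an object of unknown size */
  printf ("%zu %zu\n", __builtin_object_size (buf + 4, 0),      /* 28 */
          __builtin_object_size (buf + 4, 2));                  /* 28 */
  printf ("%zu %zu\n", __builtin_object_size (p, 0),    /* (size_t) -1 */
          __builtin_object_size (p, 2));                /* 0 */
  (void) argc;
  return 0;
}
#endif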
10159 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10160 FCODE is the BUILT_IN_* to use.
10161 Return NULL_RTX if we failed; the caller should emit a normal call,
10162 otherwise try to get the result in TARGET, if convenient (and in
10163 mode MODE if that's convenient). */
10165 static rtx
10166 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10167 enum built_in_function fcode)
10169 if (!validate_arglist (exp,
10170 POINTER_TYPE,
10171 fcode == BUILT_IN_MEMSET_CHK
10172 ? INTEGER_TYPE : POINTER_TYPE,
10173 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10174 return NULL_RTX;
10176 tree dest = CALL_EXPR_ARG (exp, 0);
10177 tree src = CALL_EXPR_ARG (exp, 1);
10178 tree len = CALL_EXPR_ARG (exp, 2);
10179 tree size = CALL_EXPR_ARG (exp, 3);
10181 /* FIXME: Set access mode to write only for memset et al. */
10182 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10183 /*srcstr=*/NULL_TREE, size, access_read_write);
10185 if (!tree_fits_uhwi_p (size))
10186 return NULL_RTX;
10188 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10190 /* Avoid transforming the checking call to an ordinary one when
10191 an overflow has been detected or when the call couldn't be
10192 validated because the size is not constant. */
10193 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10194 return NULL_RTX;
10196 tree fn = NULL_TREE;
10197 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10198 mem{cpy,pcpy,move,set} is available. */
10199 switch (fcode)
10201 case BUILT_IN_MEMCPY_CHK:
10202 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10203 break;
10204 case BUILT_IN_MEMPCPY_CHK:
10205 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10206 break;
10207 case BUILT_IN_MEMMOVE_CHK:
10208 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10209 break;
10210 case BUILT_IN_MEMSET_CHK:
10211 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10212 break;
10213 default:
10214 break;
10217 if (! fn)
10218 return NULL_RTX;
10220 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10221 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10222 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10223 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10225 else if (fcode == BUILT_IN_MEMSET_CHK)
10226 return NULL_RTX;
10227 else
10229 unsigned int dest_align = get_pointer_alignment (dest);
10231 /* If DEST is not a pointer type, call the normal function. */
10232 if (dest_align == 0)
10233 return NULL_RTX;
10235 /* If SRC and DEST are the same (and not volatile), do nothing. */
10236 if (operand_equal_p (src, dest, 0))
10238 tree expr;
10240 if (fcode != BUILT_IN_MEMPCPY_CHK)
10242 /* Evaluate and ignore LEN in case it has side-effects. */
10243 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10244 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10247 expr = fold_build_pointer_plus (dest, len);
10248 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10251 /* __memmove_chk special case. */
10252 if (fcode == BUILT_IN_MEMMOVE_CHK)
10254 unsigned int src_align = get_pointer_alignment (src);
10256 if (src_align == 0)
10257 return NULL_RTX;
10259 /* If src is categorized for a readonly section we can use
10260 normal __memcpy_chk. */
10261 if (readonly_data_expr (src))
10263 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10264 if (!fn)
10265 return NULL_RTX;
10266 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10267 dest, src, len, size);
10268 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10269 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10270 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10273 return NULL_RTX;
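/* Illustrative sketch, not part of the GCC sources: the degenerate case
   handled above in user terms.  With a constant length that fits in the
   destination, the checking call can be emitted as the ordinary memcpy.  */
#if 0
#include <string.h>

char dst[16];

void
copy_small (const char *src)
{
  /* Effectively expands like memcpy (dst, src, 8): 8 <= 16, so no runtime
     check is needed.  */
  __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
}
#endif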
10277 /* Emit warning if a buffer overflow is detected at compile time. */
10279 static void
10280 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10282 /* The source string. */
10283 tree srcstr = NULL_TREE;
10284 /* The size of the destination object returned by __builtin_object_size. */
10285 tree objsize = NULL_TREE;
10286 /* The string that is being concatenated with (as in __strcat_chk)
10287 or null if it isn't. */
10288 tree catstr = NULL_TREE;
10289 /* The maximum length of the source sequence in a bounded operation
10290 (such as __strncat_chk) or null if the operation isn't bounded
10291 (such as __strcat_chk). */
10292 tree maxread = NULL_TREE;
10293 /* The exact size of the access (such as in __strncpy_chk). */
10294 tree size = NULL_TREE;
10295 /* The access by the function that's checked. Except for snprintf
10296 both writing and reading is checked. */
10297 access_mode mode = access_read_write;
10299 switch (fcode)
10301 case BUILT_IN_STRCPY_CHK:
10302 case BUILT_IN_STPCPY_CHK:
10303 srcstr = CALL_EXPR_ARG (exp, 1);
10304 objsize = CALL_EXPR_ARG (exp, 2);
10305 break;
10307 case BUILT_IN_STRCAT_CHK:
10308 /* For __strcat_chk the warning will be emitted only if overflowing
10309 by at least strlen (dest) + 1 bytes. */
10310 catstr = CALL_EXPR_ARG (exp, 0);
10311 srcstr = CALL_EXPR_ARG (exp, 1);
10312 objsize = CALL_EXPR_ARG (exp, 2);
10313 break;
10315 case BUILT_IN_STRNCAT_CHK:
10316 catstr = CALL_EXPR_ARG (exp, 0);
10317 srcstr = CALL_EXPR_ARG (exp, 1);
10318 maxread = CALL_EXPR_ARG (exp, 2);
10319 objsize = CALL_EXPR_ARG (exp, 3);
10320 break;
10322 case BUILT_IN_STRNCPY_CHK:
10323 case BUILT_IN_STPNCPY_CHK:
10324 srcstr = CALL_EXPR_ARG (exp, 1);
10325 size = CALL_EXPR_ARG (exp, 2);
10326 objsize = CALL_EXPR_ARG (exp, 3);
10327 break;
10329 case BUILT_IN_SNPRINTF_CHK:
10330 case BUILT_IN_VSNPRINTF_CHK:
10331 maxread = CALL_EXPR_ARG (exp, 1);
10332 objsize = CALL_EXPR_ARG (exp, 3);
10333 /* The only checked access is the write to the destination. */
10334 mode = access_write_only;
10335 break;
10336 default:
10337 gcc_unreachable ();
10340 if (catstr && maxread)
10342 /* Check __strncat_chk. There is no way to determine the length
10343 of the string to which the source string is being appended so
10344 just warn when the length of the source string is not known. */
10345 check_strncat_sizes (exp, objsize);
10346 return;
10349 check_access (exp, size, maxread, srcstr, objsize, mode);
10352 /* Emit warning if a buffer overflow is detected at compile time
10353 in __sprintf_chk/__vsprintf_chk calls. */
10355 static void
10356 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10358 tree size, len, fmt;
10359 const char *fmt_str;
10360 int nargs = call_expr_nargs (exp);
10362 /* Verify the required arguments in the original call. */
10364 if (nargs < 4)
10365 return;
10366 size = CALL_EXPR_ARG (exp, 2);
10367 fmt = CALL_EXPR_ARG (exp, 3);
10369 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10370 return;
10372 /* Check whether the format is a literal string constant. */
10373 fmt_str = c_getstr (fmt);
10374 if (fmt_str == NULL)
10375 return;
10377 if (!init_target_chars ())
10378 return;
10380 /* If the format doesn't contain % args or %%, we know its size. */
10381 if (strchr (fmt_str, target_percent) == 0)
10382 len = build_int_cstu (size_type_node, strlen (fmt_str));
10383 /* If the format is "%s" and first ... argument is a string literal,
10384 we know it too. */
10385 else if (fcode == BUILT_IN_SPRINTF_CHK
10386 && strcmp (fmt_str, target_percent_s) == 0)
10388 tree arg;
10390 if (nargs < 5)
10391 return;
10392 arg = CALL_EXPR_ARG (exp, 4);
10393 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10394 return;
10396 len = c_strlen (arg, 1);
10397 if (!len || ! tree_fits_uhwi_p (len))
10398 return;
10400 else
10401 return;
10403 /* Add one for the terminating nul. */
10404 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10406 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10407 access_write_only);
10410 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10411 if possible. */
10413 static tree
10414 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10416 tree bytes;
10417 int object_size_type;
10419 if (!validate_arg (ptr, POINTER_TYPE)
10420 || !validate_arg (ost, INTEGER_TYPE))
10421 return NULL_TREE;
10423 STRIP_NOPS (ost);
10425 if (TREE_CODE (ost) != INTEGER_CST
10426 || tree_int_cst_sgn (ost) < 0
10427 || compare_tree_int (ost, 3) > 0)
10428 return NULL_TREE;
10430 object_size_type = tree_to_shwi (ost);
10432 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10433 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10434 and (size_t) 0 for types 2 and 3. */
10435 if (TREE_SIDE_EFFECTS (ptr))
10436 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10438 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10439 object_size_type |= OST_DYNAMIC;
10441 if (TREE_CODE (ptr) == ADDR_EXPR)
10443 compute_builtin_object_size (ptr, object_size_type, &bytes);
10444 if ((object_size_type & OST_DYNAMIC)
10445 || int_fits_type_p (bytes, size_type_node))
10446 return fold_convert (size_type_node, bytes);
10448 else if (TREE_CODE (ptr) == SSA_NAME)
10450 /* If object size is not known yet, delay folding until
10451 later. Maybe subsequent passes will help determine
10452 it. */
10453 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10454 && ((object_size_type & OST_DYNAMIC)
10455 || int_fits_type_p (bytes, size_type_node)))
10456 return fold_convert (size_type_node, bytes);
10459 return NULL_TREE;
10462 /* Builtins with folding operations that operate on "..." arguments
10463 need special handling; we need to store the arguments in a convenient
10464 data structure before attempting any folding. Fortunately there are
10465 only a few builtins that fall into this category. FNDECL is the
10466 function, EXP is the CALL_EXPR for the call. */
10468 static tree
10469 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10472 tree ret = NULL_TREE;
10474 switch (fcode)
10476 case BUILT_IN_FPCLASSIFY:
10477 ret = fold_builtin_fpclassify (loc, args, nargs);
10478 break;
10480 default:
10481 break;
10483 if (ret)
10485 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10486 SET_EXPR_LOCATION (ret, loc);
10487 suppress_warning (ret);
10488 return ret;
10490 return NULL_TREE;
10493 /* Initialize format string characters in the target charset. */
10495 bool
10496 init_target_chars (void)
10498 static bool init;
10499 if (!init)
10501 target_newline = lang_hooks.to_target_charset ('\n');
10502 target_percent = lang_hooks.to_target_charset ('%');
10503 target_c = lang_hooks.to_target_charset ('c');
10504 target_s = lang_hooks.to_target_charset ('s');
10505 if (target_newline == 0 || target_percent == 0 || target_c == 0
10506 || target_s == 0)
10507 return false;
10509 target_percent_c[0] = target_percent;
10510 target_percent_c[1] = target_c;
10511 target_percent_c[2] = '\0';
10513 target_percent_s[0] = target_percent;
10514 target_percent_s[1] = target_s;
10515 target_percent_s[2] = '\0';
10517 target_percent_s_newline[0] = target_percent;
10518 target_percent_s_newline[1] = target_s;
10519 target_percent_s_newline[2] = target_newline;
10520 target_percent_s_newline[3] = '\0';
10522 init = true;
10524 return true;
10527 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10528 and no overflow/underflow occurred. INEXACT is true if M was not
10529 exactly calculated. TYPE is the tree type for the result. This
10530 function assumes that you cleared the MPFR flags and then
10531 calculated M to see if anything subsequently set a flag prior to
10532 entering this function. Return NULL_TREE if any checks fail. */
10534 static tree
10535 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10537 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10538 overflow/underflow occurred. If -frounding-math, proceed iff the
10539 result of calling FUNC was exact. */
10540 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10541 && (!flag_rounding_math || !inexact))
10543 REAL_VALUE_TYPE rr;
10545 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10546 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10547 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10548 but the mpfr_t is not, then we underflowed in the
10549 conversion. */
10550 if (real_isfinite (&rr)
10551 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10553 REAL_VALUE_TYPE rmode;
10555 real_convert (&rmode, TYPE_MODE (type), &rr);
10556 /* Proceed iff the specified mode can hold the value. */
10557 if (real_identical (&rmode, &rr))
10558 return build_real (type, rmode);
10561 return NULL_TREE;
10564 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10565 number and no overflow/underflow occurred. INEXACT is true if M
10566 was not exactly calculated. TYPE is the tree type for the result.
10567 This function assumes that you cleared the MPFR flags and then
10568 calculated M to see if anything subsequently set a flag prior to
10569 entering this function. Return NULL_TREE if any checks fail; if
10570 FORCE_CONVERT is true, then bypass the checks. */
10572 static tree
10573 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10575 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10576 overflow/underflow occurred. If -frounding-math, proceed iff the
10577 result of calling FUNC was exact. */
10578 if (force_convert
10579 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10580 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10581 && (!flag_rounding_math || !inexact)))
10583 REAL_VALUE_TYPE re, im;
10585 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10586 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10587 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10588 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10589 but the mpfr_t is not, then we underflowed in the
10590 conversion. */
10591 if (force_convert
10592 || (real_isfinite (&re) && real_isfinite (&im)
10593 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10594 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10596 REAL_VALUE_TYPE re_mode, im_mode;
10598 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10599 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10600 /* Proceed iff the specified mode can hold the value. */
10601 if (force_convert
10602 || (real_identical (&re_mode, &re)
10603 && real_identical (&im_mode, &im)))
10604 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10605 build_real (TREE_TYPE (type), im_mode));
10608 return NULL_TREE;

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to compute
   the remainder, store the quotient bits through *ARG_QUO, and return the
   result.  The type is taken from the type of ARG0 and is used to set the
   precision of the calculation and of the results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, MPFR_RNDN);
          mpfr_from_real (m1, ra1, MPFR_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
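
/* As an illustration, with constant arguments

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the fold above would typically produce a COMPOUND_EXPR that assigns 2 to
   q (5/3 rounded to nearest) and yields the REAL_CST -1.0 (5 - 2*3),
   assuming a base-2 target format and an int-typed quotient pointer;
   otherwise NULL_TREE is returned and the call is kept.  */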

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, MPFR_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
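
/* As an illustration,

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);

   would typically fold to a COMPOUND_EXPR assigning 1 to sg (gamma (0.5)
   = sqrt (pi) is positive) and yielding a REAL_CST of approximately
   0.5724 (log (sqrt (pi))), provided the argument and pointer checks
   above succeed.  */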

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero
                                 ? MPFR_RNDZ : MPFR_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }
  return result;
}
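
/* Illustration: this helper backs constant folding of two-argument complex
   builtins, e.g. __builtin_cpow with COMPLEX_CST operands, where FUNC is
   the corresponding mpc routine (mpc_pow).  A call such as
   cpow (2.0 + 0.0i, 2.0 + 0.0i) is then evaluated in MPC at the target
   precision and, if the checks in do_mpc_ckconv pass, replaced by a
   COMPLEX_CST close to 4.0 + 0.0i.  */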

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
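
/* Illustration: with a declaration that carries an asm label, e.g.

     extern int ffs (int) __asm__ ("__my_ffs");

   calls that GCC itself generates for the builtin are redirected to the
   user-supplied assembler name, and for ffs on targets where int is
   narrower than a word the ffs optab libfunc is redirected as well.  */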

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_DWARF_CFA:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that it is not
   (the list of builtins handled below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
        /* stpcpy is both referenced in libiberty's pex-win32.c and provided
           by libiberty's stpcpy.c for MinGW targets, so we need to return
           true in order to be able to build libiberty in LTO mode for
           them.  */
        return true;

      default:
        break;
      }
  return false;
}

/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
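
/* For instance, with a 64-bit ptrdiff_t an offset range of [0, 4096] is
   considered bounded, while a range that extends beyond
   [PTRDIFF_MIN, PTRDIFF_MAX] is not.  */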

/* Return the fnspec string describing the known side effects of the
   builtin CALLEE, or an empty fnspec if nothing is known.  See
   tree-ssa-structalias.cc:find_func_aliases for the list of builtins
   we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);
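
  /* A rough reading guide for the strings returned below (see
     attr-fnspec.h for the authoritative grammar): the first character
     describes the return value, the second the function's overall effect
     on memory, and each subsequent pair of characters describes one
     argument.  For example, memcpy's "1cO313" means roughly: the function
     returns its first argument ('1'), has no memory effects other than
     those described ('c'), only writes through argument 1 with the access
     size bounded by argument 3 ("O3"), and copies the memory pointed to by
     argument 2 into argument 1, again with size bounded by argument 3
     ("13").  */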

  switch (code)
    {
      /* All the following functions read memory pointed to by
         their second argument and write memory pointed to by the first
         argument.
         strcat/strncat additionally read memory pointed to by the first
         argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
      return "1cW 1 ";
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
      return "1cW 13";
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
      return "1cO 1 ";
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      return ".cO 1 ";
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      return "1cO313";
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      return ".cO313";
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      return ".cO313";
    case BUILT_IN_BCOPY:
      return ".c23O3";
    case BUILT_IN_BZERO:
      return ".cO2";
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return ".cR3R3";

      /* The following functions read memory pointed to by their
         first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:
      return ".cR ";

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:
      return ".cR ";
    case BUILT_IN_STRNLEN:
      return ".cR2";

      /* These read memory pointed to by the first argument.
         Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.
         Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:
      return "mCR ";
    case BUILT_IN_STRNDUP:
      return "mCR2";
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:
      return "mC";
    CASE_BUILT_IN_ALLOCA:
      return "mc";
      /* These read memory pointed to by the first argument with size
         in the third argument.  */
    case BUILT_IN_MEMCHR:
      return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
      return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly,
         the call has to serve as a barrier for moving loads and stores
         across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:
      return ".co ";
    case BUILT_IN_VA_END:
      return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
    case BUILT_IN_REALLOC:
      return ".Cw ";
    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:
      return ".C. Ot";
    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:
      return ".c. Ot";
    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:
      return ".c. . Ot";
    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:
      return ".c. OtOt";
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:
      return "1cO3";
    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):
      return ".cO ";
    case BUILT_IN_STACK_SAVE:
    case BUILT_IN_RETURN:
    case BUILT_IN_EH_POINTER:
    case BUILT_IN_EH_FILTER:
    case BUILT_IN_UNWIND_RESUME:
    case BUILT_IN_CXA_END_CLEANUP:
    case BUILT_IN_EH_COPY_VALUES:
    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_APPLY_ARGS:
    case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
    case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
    case BUILT_IN_PREFETCH:
    case BUILT_IN_DWARF_CFA:
    case BUILT_IN_RETURN_ADDRESS:
      return ".c";
    case BUILT_IN_ASSUME_ALIGNED:
      return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN:
      return ".cOt";

    default:
      return "";