gcc/builtins.cc
1 /* Expand builtin functions.
2 Copyright (C) 1988-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
97 #include "builtins.def"
100 /* Set up an array of builtin_info_type; make sure each element's decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_int_roundingfn (tree, rtx);
127 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 static rtx expand_builtin_next_arg (void);
129 static rtx expand_builtin_va_start (tree);
130 static rtx expand_builtin_va_end (tree);
131 static rtx expand_builtin_va_copy (tree);
132 static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 static rtx expand_builtin_strcmp (tree, rtx);
134 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 static rtx expand_builtin_memcpy (tree, rtx);
136 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 rtx target, tree exp,
138 memop_ret retmode,
139 bool might_overlap);
140 static rtx expand_builtin_memmove (tree, rtx);
141 static rtx expand_builtin_mempcpy (tree, rtx);
142 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 static rtx expand_builtin_strcpy (tree, rtx);
144 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 static rtx expand_builtin_strncpy (tree, rtx);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
186 unsigned HOST_WIDE_INT target_newline;
187 unsigned HOST_WIDE_INT target_percent;
188 static unsigned HOST_WIDE_INT target_c;
189 static unsigned HOST_WIDE_INT target_s;
190 char target_percent_c[3];
191 char target_percent_s[3];
192 char target_percent_s_newline[4];
193 static tree do_mpfr_remquo (tree, tree, tree);
194 static tree do_mpfr_lgamma_r (tree, tree, tree);
195 static void expand_builtin_sync_synchronize (void);
197 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
199 static bool
200 is_builtin_name (const char *name)
202 return (startswith (name, "__builtin_")
203 || startswith (name, "__sync_")
204 || startswith (name, "__atomic_"));
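/* For example, is_builtin_name holds for "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", but not for the plain
   library name "memcpy".  */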
207 /* Return true if NODE should be considered for inline expansion regardless
208 of the optimization level. This means whenever a function is invoked with
209 its "internal" name, which normally contains the prefix "__builtin". */
211 bool
212 called_as_built_in (tree node)
214 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
215 we want the name used to call the function, not the name it
216 will have. */
217 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
218 return is_builtin_name (name);
221 /* Compute values M and N such that M divides (address of EXP - N) and such
222 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
223 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
224 *ALIGNP and any bit-offset to *BITPOSP.
226 Note that the address (and thus the alignment) computed here is based
227 on the address to which a symbol resolves, whereas DECL_ALIGN is based
228 on the address at which an object is actually located. These two
229 addresses are not always the same. For example, on ARM targets,
230 the address &foo of a Thumb function foo() has the lowest bit set,
231 whereas foo() itself starts on an even address.
233 If ADDR_P is true we are taking the address of the memory reference EXP
234 and thus cannot rely on the access taking place. */
236 bool
237 get_object_alignment_2 (tree exp, unsigned int *alignp,
238 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
240 poly_int64 bitsize, bitpos;
241 tree offset;
242 machine_mode mode;
243 int unsignedp, reversep, volatilep;
244 unsigned int align = BITS_PER_UNIT;
245 bool known_alignment = false;
247 /* Get the innermost object and the constant (bitpos) and possibly
248 variable (offset) offset of the access. */
249 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
250 &unsignedp, &reversep, &volatilep);
252 /* Extract alignment information from the innermost object and
253 possibly adjust bitpos and offset. */
254 if (TREE_CODE (exp) == FUNCTION_DECL)
256 /* Function addresses can encode extra information besides their
257 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
258 allows the low bit to be used as a virtual bit, we know
259 that the address itself must be at least 2-byte aligned. */
260 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
261 align = 2 * BITS_PER_UNIT;
263 else if (TREE_CODE (exp) == LABEL_DECL)
265 else if (TREE_CODE (exp) == CONST_DECL)
267 /* The alignment of a CONST_DECL is determined by its initializer. */
268 exp = DECL_INITIAL (exp);
269 align = TYPE_ALIGN (TREE_TYPE (exp));
270 if (CONSTANT_CLASS_P (exp))
271 align = targetm.constant_alignment (exp, align);
273 known_alignment = true;
275 else if (DECL_P (exp))
277 align = DECL_ALIGN (exp);
278 known_alignment = true;
280 else if (TREE_CODE (exp) == INDIRECT_REF
281 || TREE_CODE (exp) == MEM_REF
282 || TREE_CODE (exp) == TARGET_MEM_REF)
284 tree addr = TREE_OPERAND (exp, 0);
285 unsigned ptr_align;
286 unsigned HOST_WIDE_INT ptr_bitpos;
287 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
289 /* If the address is explicitly aligned, handle that. */
290 if (TREE_CODE (addr) == BIT_AND_EXPR
291 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
293 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
294 ptr_bitmask *= BITS_PER_UNIT;
295 align = least_bit_hwi (ptr_bitmask);
296 addr = TREE_OPERAND (addr, 0);
299 known_alignment
300 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
301 align = MAX (ptr_align, align);
303 /* Re-apply explicit alignment to the bitpos. */
304 ptr_bitpos &= ptr_bitmask;
306 /* The alignment of the pointer operand in a TARGET_MEM_REF
307 has to take the variable offset parts into account. */
308 if (TREE_CODE (exp) == TARGET_MEM_REF)
310 if (TMR_INDEX (exp))
312 unsigned HOST_WIDE_INT step = 1;
313 if (TMR_STEP (exp))
314 step = TREE_INT_CST_LOW (TMR_STEP (exp));
315 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
317 if (TMR_INDEX2 (exp))
318 align = BITS_PER_UNIT;
319 known_alignment = false;
322 /* When EXP is an actual memory reference then we can use
323 TYPE_ALIGN of a pointer indirection to derive alignment.
324 Do so only if get_pointer_alignment_1 did not reveal absolute
325 alignment knowledge and if using that alignment would
326 improve the situation. */
327 unsigned int talign;
328 if (!addr_p && !known_alignment
329 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
330 && talign > align)
331 align = talign;
332 else
334 /* Else adjust bitpos accordingly. */
335 bitpos += ptr_bitpos;
336 if (TREE_CODE (exp) == MEM_REF
337 || TREE_CODE (exp) == TARGET_MEM_REF)
338 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 else if (TREE_CODE (exp) == STRING_CST)
343 /* STRING_CSTs are the only constant objects we allow not to be
344 wrapped inside a CONST_DECL. */
345 align = TYPE_ALIGN (TREE_TYPE (exp));
346 if (CONSTANT_CLASS_P (exp))
347 align = targetm.constant_alignment (exp, align);
349 known_alignment = true;
352 /* If there is a non-constant offset part extract the maximum
353 alignment that can prevail. */
354 if (offset)
356 unsigned int trailing_zeros = tree_ctz (offset);
357 if (trailing_zeros < HOST_BITS_PER_INT)
359 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
360 if (inner)
361 align = MIN (align, inner);
365 /* Account for the alignment of runtime coefficients, so that the constant
366 bitpos is guaranteed to be accurate. */
367 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
368 if (alt_align != 0 && alt_align < align)
370 align = alt_align;
371 known_alignment = false;
374 *alignp = align;
375 *bitposp = bitpos.coeffs[0] & (align - 1);
376 return known_alignment;
379 /* For a memory reference expression EXP compute values M and N such that M
380 divides (&EXP - N) and such that N < M. If these numbers can be determined,
381 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
382 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
384 bool
385 get_object_alignment_1 (tree exp, unsigned int *alignp,
386 unsigned HOST_WIDE_INT *bitposp)
388 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
389 with it. */
390 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
391 exp = TREE_OPERAND (exp, 0);
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 /* Return the alignment in bits of EXP, an object. */
397 unsigned int
398 get_object_alignment (tree exp)
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
403 get_object_alignment_1 (exp, &align, &bitpos);
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
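/* A worked example, assuming BITS_PER_UNIT == 8: if get_object_alignment_1
   reports align == 128 and bitpos == 32, the object's address is known to
   satisfy address % 16 == 4 in bytes, so the strongest claim that holds for
   the object itself is least_bit_hwi (32) == 32 bits, i.e. 4-byte
   alignment.  */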
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
418 If EXP is not a pointer, false is returned too. */
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
424 STRIP_NOPS (exp);
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
468 else
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
475 else if (TREE_CODE (exp) == INTEGER_CST)
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
495 unsigned int
496 get_pointer_alignment (tree exp)
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
501 get_pointer_alignment_1 (exp, &align, &bitpos);
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
509 return align;
512 /* Return the number of leading non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516 unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521 unsigned n;
523 if (eltsize == 1)
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
533 else
535 for (n = 0; n < maxelts; n++)
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
542 return n;
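/* For instance, with ELTSIZE == 2 the six bytes {'a', 0, 'b', 0, 0, 0}
   form three 2-byte elements of which the first two are non-zero, so
   string_length returns 2; the same bytes with ELTSIZE == 1 give 1.  */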
545 /* Compute the length of a null-terminated character string or wide
546 character string handling character sizes of 1, 2, and 4 bytes.
547 TREE_STRING_LENGTH is not the right way because it evaluates to
548 the size of the character array in bytes (as opposed to characters)
549 and because it can contain a zero byte in the middle.
551 ONLY_VALUE should be nonzero if the result is not going to be emitted
552 into the instruction stream and zero if it is going to be expanded.
553 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
554 is returned, otherwise NULL, since
555 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
556 evaluate the side-effects.
558 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
559 accesses. Note that this implies the result is not going to be emitted
560 into the instruction stream.
562 Additional information about the string accessed may be recorded
563 in DATA. For example, if ARG references an unterminated string,
564 then the declaration will be stored in the DECL field. If the
565 length of the unterminated string can be determined, it'll be
566 stored in the LEN field. Note this length could well be different
567 than what a C strlen call would return.
569 ELTSIZE is 1 for normal single byte character strings, and 2 or
570 4 for wide character strings. ELTSIZE is by default 1.
572 The value returned is of type `ssizetype'. */
574 tree
575 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
577 /* If we were not passed a DATA pointer, then get one to a local
578 structure. That avoids having to check DATA for NULL before
579 each time we want to use it. */
580 c_strlen_data local_strlen_data = { };
581 if (!data)
582 data = &local_strlen_data;
584 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
586 tree src = STRIP_NOPS (arg);
587 if (TREE_CODE (src) == COND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
590 tree len1, len2;
592 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
593 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
594 if (tree_int_cst_equal (len1, len2))
595 return len1;
598 if (TREE_CODE (src) == COMPOUND_EXPR
599 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
600 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
602 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
604 /* Offset from the beginning of the string in bytes. */
605 tree byteoff;
606 tree memsize;
607 tree decl;
608 src = string_constant (src, &byteoff, &memsize, &decl);
609 if (src == 0)
610 return NULL_TREE;
612 /* Determine the size of the string element. */
613 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
614 return NULL_TREE;
616 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
617 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
618 in case the latter is less than the size of the array, such as when
619 SRC refers to a short string literal used to initialize a large array.
620 In that case, the elements of the array after the terminating NUL are
621 all NUL. */
622 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
623 strelts = strelts / eltsize;
625 if (!tree_fits_uhwi_p (memsize))
626 return NULL_TREE;
628 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
630 /* PTR can point to the byte representation of any string type, including
631 char* and wchar_t*. */
632 const char *ptr = TREE_STRING_POINTER (src);
634 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
636 /* The code below works only for single byte character types. */
637 if (eltsize != 1)
638 return NULL_TREE;
640 /* If the string has an internal NUL character followed by any
641 non-NUL characters (e.g., "foo\0bar"), we can't compute
642 the offset to the following NUL if we don't know where to
643 start searching for it. */
644 unsigned len = string_length (ptr, eltsize, strelts);
646 /* Return when an embedded null character is found or none at all.
647 In the latter case, set the DECL/LEN field in the DATA structure
648 so that callers may examine them. */
649 if (len + 1 < strelts)
650 return NULL_TREE;
651 else if (len >= maxelts)
653 data->decl = decl;
654 data->off = byteoff;
655 data->minlen = ssize_int (len);
656 return NULL_TREE;
659 /* For empty strings the result should be zero. */
660 if (len == 0)
661 return ssize_int (0);
663 /* We don't know the starting offset, but we do know that the string
664 has no internal zero bytes. If the offset falls within the bounds
665 of the string subtract the offset from the length of the string,
666 and return that. Otherwise the length is zero. Take care to
667 use SAVE_EXPR in case the OFFSET has side-effects. */
668 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
669 : byteoff;
670 offsave = fold_convert_loc (loc, sizetype, offsave);
671 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
672 size_int (len));
673 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
674 offsave);
675 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
676 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
677 build_zero_cst (ssizetype));
680 /* Offset from the beginning of the string in elements. */
681 HOST_WIDE_INT eltoff;
683 /* We have a known offset into the string. Start searching there for
684 a null character if we can represent it as a single HOST_WIDE_INT. */
685 if (byteoff == 0)
686 eltoff = 0;
687 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
688 eltoff = -1;
689 else
690 eltoff = tree_to_uhwi (byteoff) / eltsize;
692 /* If the offset is known to be out of bounds, warn, and call strlen at
693 runtime. */
694 if (eltoff < 0 || eltoff >= maxelts)
696 /* Suppress multiple warnings for propagated constant strings. */
697 if (only_value != 2
698 && !warning_suppressed_p (arg, OPT_Warray_bounds)
699 && warning_at (loc, OPT_Warray_bounds,
700 "offset %qwi outside bounds of constant string",
701 eltoff))
703 if (decl)
704 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
705 suppress_warning (arg, OPT_Warray_bounds);
707 return NULL_TREE;
710 /* If eltoff is larger than strelts but less than maxelts the
711 string length is zero, since the excess memory will be zero. */
712 if (eltoff > strelts)
713 return ssize_int (0);
715 /* Use strlen to search for the first zero byte. Since any strings
716 constructed with build_string will have nulls appended, we win even
717 if we get handed something like (char[4])"abcd".
719 Since ELTOFF is our starting index into the string, no further
720 calculation is needed. */
721 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
722 strelts - eltoff);
724 /* Don't know what to return if there was no zero termination.
725 Ideally this would turn into a gcc_checking_assert over time.
726 Set DECL/LEN so callers can examine them. */
727 if (len >= maxelts - eltoff)
729 data->decl = decl;
730 data->off = byteoff;
731 data->minlen = ssize_int (len);
732 return NULL_TREE;
735 return ssize_int (len);
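/* As an illustration, for __builtin_strlen (&"foobar"[2]) the STRING_CST is
   "foobar" with a byte offset of 2, so ELTOFF is 2 and ssize_int (4) is
   returned; a constant offset outside the object instead triggers the
   -Warray-bounds warning above and yields NULL_TREE.  */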
738 /* Return a constant integer corresponding to target reading
739 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
740 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
741 are assumed to be zero, otherwise it reads as many characters
742 as needed. */
744 rtx
745 c_readstr (const char *str, scalar_int_mode mode,
746 bool null_terminated_p/*=true*/)
748 HOST_WIDE_INT ch;
749 unsigned int i, j;
750 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
752 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
753 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
754 / HOST_BITS_PER_WIDE_INT;
756 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
757 for (i = 0; i < len; i++)
758 tmp[i] = 0;
760 ch = 1;
761 for (i = 0; i < GET_MODE_SIZE (mode); i++)
763 j = i;
764 if (WORDS_BIG_ENDIAN)
765 j = GET_MODE_SIZE (mode) - i - 1;
766 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
767 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
768 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
769 j *= BITS_PER_UNIT;
771 if (ch || !null_terminated_p)
772 ch = (unsigned char) str[i];
773 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
776 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
777 return immed_wide_int_const (c, mode);
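/* For illustration, assuming 8-bit units and both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN false, c_readstr ("abc", SImode) yields the constant
   0x00636261: 'a', 'b' and 'c' land in the three low bytes and the fourth
   byte, taken from the '\0' terminator, reads as zero.  */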
780 /* Cast a target constant CST to target CHAR and if that value fits into
781 host char type, return zero and put that value into variable pointed to by
782 P. */
784 static int
785 target_char_cast (tree cst, char *p)
787 unsigned HOST_WIDE_INT val, hostval;
789 if (TREE_CODE (cst) != INTEGER_CST
790 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
791 return 1;
793 /* Do not care if it fits or not right here. */
794 val = TREE_INT_CST_LOW (cst);
796 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
797 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
799 hostval = val;
800 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
801 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
803 if (val != hostval)
804 return 1;
806 *p = hostval;
807 return 0;
810 /* Similar to save_expr, but assumes that arbitrary code is not executed
811 in between the multiple evaluations. In particular, we assume that a
812 non-addressable local variable will not be modified. */
814 static tree
815 builtin_save_expr (tree exp)
817 if (TREE_CODE (exp) == SSA_NAME
818 || (TREE_ADDRESSABLE (exp) == 0
819 && (TREE_CODE (exp) == PARM_DECL
820 || (VAR_P (exp) && !TREE_STATIC (exp)))))
821 return exp;
823 return save_expr (exp);
826 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
827 times to get the address of either a higher stack frame, or a return
828 address located within it (depending on FNDECL_CODE). */
830 static rtx
831 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
833 int i;
834 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
835 if (tem == NULL_RTX)
837 /* For a zero count with __builtin_return_address, we don't care what
838 frame address we return, because target-specific definitions will
839 override us. Therefore frame pointer elimination is OK, and using
840 the soft frame pointer is OK.
842 For a nonzero count, or a zero count with __builtin_frame_address,
843 we require a stable offset from the current frame pointer to the
844 previous one, so we must use the hard frame pointer, and
845 we must disable frame pointer elimination. */
846 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
847 tem = frame_pointer_rtx;
848 else
850 tem = hard_frame_pointer_rtx;
852 /* Tell reload not to eliminate the frame pointer. */
853 crtl->accesses_prior_frames = 1;
857 if (count > 0)
858 SETUP_FRAME_ADDRESSES ();
860 /* On the SPARC, the return address is not in the frame, it is in a
861 register. There is no way to access it off of the current frame
862 pointer, but it can be accessed off the previous frame pointer by
863 reading the value from the register window save area. */
864 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
865 count--;
867 /* Scan back COUNT frames to the specified frame. */
868 for (i = 0; i < count; i++)
870 /* Assume the dynamic chain pointer is in the word that the
871 frame address points to, unless otherwise specified. */
872 tem = DYNAMIC_CHAIN_ADDRESS (tem);
873 tem = memory_address (Pmode, tem);
874 tem = gen_frame_mem (Pmode, tem);
875 tem = copy_to_reg (tem);
878 /* For __builtin_frame_address, return what we've got. But, on
879 the SPARC for example, we may have to add a bias. */
880 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
881 return FRAME_ADDR_RTX (tem);
883 /* For __builtin_return_address, get the return address from that frame. */
884 #ifdef RETURN_ADDR_RTX
885 tem = RETURN_ADDR_RTX (count, tem);
886 #else
887 tem = memory_address (Pmode,
888 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
889 tem = gen_frame_mem (Pmode, tem);
890 #endif
891 return tem;
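/* Thus __builtin_return_address (0) yields the return address of the
   current frame, while __builtin_frame_address (1) follows the dynamic
   chain once and yields the caller's frame address, possibly adjusted by
   FRAME_ADDR_RTX on targets (such as SPARC) that apply a bias.  */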
894 /* Alias set used for setjmp buffer. */
895 static alias_set_type setjmp_alias_set = -1;
897 /* Construct the leading half of a __builtin_setjmp call. Control will
898 return to RECEIVER_LABEL. This is also called directly by the SJLJ
899 exception handling code. */
901 void
902 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
904 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
905 rtx stack_save;
906 rtx mem;
908 if (setjmp_alias_set == -1)
909 setjmp_alias_set = new_alias_set ();
911 buf_addr = convert_memory_address (Pmode, buf_addr);
913 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
915 /* We store the frame pointer and the address of receiver_label in
916 the buffer and use the rest of it for the stack save area, which
917 is machine-dependent. */
919 mem = gen_rtx_MEM (Pmode, buf_addr);
920 set_mem_alias_set (mem, setjmp_alias_set);
921 emit_move_insn (mem, hard_frame_pointer_rtx);
923 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
924 GET_MODE_SIZE (Pmode))),
925 set_mem_alias_set (mem, setjmp_alias_set);
927 emit_move_insn (validize_mem (mem),
928 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
930 stack_save = gen_rtx_MEM (sa_mode,
931 plus_constant (Pmode, buf_addr,
932 2 * GET_MODE_SIZE (Pmode)));
933 set_mem_alias_set (stack_save, setjmp_alias_set);
934 emit_stack_save (SAVE_NONLOCAL, &stack_save);
936 /* If there is further processing to do, do it. */
937 if (targetm.have_builtin_setjmp_setup ())
938 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
940 /* We have a nonlocal label. */
941 cfun->has_nonlocal_label = 1;
944 /* Construct the trailing part of a __builtin_setjmp call. This is
945 also called directly by the SJLJ exception handling code.
946 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
948 void
949 expand_builtin_setjmp_receiver (rtx receiver_label)
951 rtx chain;
953 /* Mark the FP as used when we get here, so we have to make sure it's
954 marked as used by this function. */
955 emit_use (hard_frame_pointer_rtx);
957 /* Mark the static chain as clobbered here so life information
958 doesn't get messed up for it. */
959 chain = rtx_for_static_chain (current_function_decl, true);
960 if (chain && REG_P (chain))
961 emit_clobber (chain);
963 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
965 /* If the argument pointer can be eliminated in favor of the
966 frame pointer, we don't need to restore it. We assume here
967 that if such an elimination is present, it can always be used.
968 This is the case on all known machines; if we don't make this
969 assumption, we do unnecessary saving on many machines. */
970 size_t i;
971 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
973 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
974 if (elim_regs[i].from == ARG_POINTER_REGNUM
975 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
976 break;
978 if (i == ARRAY_SIZE (elim_regs))
980 /* Now restore our arg pointer from the address at which it
981 was saved in our stack frame. */
982 emit_move_insn (crtl->args.internal_arg_pointer,
983 copy_to_reg (get_arg_pointer_save_area ()));
987 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
988 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
989 else if (targetm.have_nonlocal_goto_receiver ())
990 emit_insn (targetm.gen_nonlocal_goto_receiver ());
991 else
992 { /* Nothing */ }
994 /* We must not allow the code we just generated to be reordered by
995 scheduling. Specifically, the update of the frame pointer must
996 happen immediately, not later. */
997 emit_insn (gen_blockage ());
1000 /* __builtin_longjmp is passed a pointer to an array of five words (not
1001 all will be used on all machines). It operates similarly to the C
1002 library function of the same name, but is more efficient. Much of
1003 the code below is copied from the handling of non-local gotos. */
1005 static void
1006 expand_builtin_longjmp (rtx buf_addr, rtx value)
1008 rtx fp, lab, stack;
1009 rtx_insn *insn, *last;
1010 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1012 /* DRAP is needed for stack realign if longjmp is expanded to current
1013 function */
1014 if (SUPPORTS_STACK_ALIGNMENT)
1015 crtl->need_drap = true;
1017 if (setjmp_alias_set == -1)
1018 setjmp_alias_set = new_alias_set ();
1020 buf_addr = convert_memory_address (Pmode, buf_addr);
1022 buf_addr = force_reg (Pmode, buf_addr);
1024 /* We require that the user must pass a second argument of 1, because
1025 that is what builtin_setjmp will return. */
1026 gcc_assert (value == const1_rtx);
1028 last = get_last_insn ();
1029 if (targetm.have_builtin_longjmp ())
1030 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1031 else
1033 fp = gen_rtx_MEM (Pmode, buf_addr);
1034 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode)));
1037 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1038 2 * GET_MODE_SIZE (Pmode)));
1039 set_mem_alias_set (fp, setjmp_alias_set);
1040 set_mem_alias_set (lab, setjmp_alias_set);
1041 set_mem_alias_set (stack, setjmp_alias_set);
1043 /* Pick up FP, label, and SP from the block and jump. This code is
1044 from expand_goto in stmt.cc; see there for detailed comments. */
1045 if (targetm.have_nonlocal_goto ())
1046 /* We have to pass a value to the nonlocal_goto pattern that will
1047 get copied into the static_chain pointer, but it does not matter
1048 what that value is, because builtin_setjmp does not use it. */
1049 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1050 else
1052 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1053 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1055 lab = copy_to_reg (lab);
1057 /* Restore the frame pointer and stack pointer. We must use a
1058 temporary since the setjmp buffer may be a local. */
1059 fp = copy_to_reg (fp);
1060 emit_stack_restore (SAVE_NONLOCAL, stack);
1062 /* Ensure the frame pointer move is not optimized. */
1063 emit_insn (gen_blockage ());
1064 emit_clobber (hard_frame_pointer_rtx);
1065 emit_clobber (frame_pointer_rtx);
1066 emit_move_insn (hard_frame_pointer_rtx, fp);
1068 emit_use (hard_frame_pointer_rtx);
1069 emit_use (stack_pointer_rtx);
1070 emit_indirect_jump (lab);
1074 /* Search backwards and mark the jump insn as a non-local goto.
1075 Note that this precludes the use of __builtin_longjmp to a
1076 __builtin_setjmp target in the same function. However, we've
1077 already cautioned the user that these functions are for
1078 internal exception handling use only. */
1079 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1081 gcc_assert (insn != last);
1083 if (JUMP_P (insn))
1085 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1086 break;
1088 else if (CALL_P (insn))
1089 break;
1093 static inline bool
1094 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1096 return (iter->i < iter->n);
1099 /* This function validates the types of a function call argument list
1100 against a specified list of tree_codes. If the last specifier is a 0,
1101 that represents an ellipsis, otherwise the last specifier must be a
1102 VOID_TYPE. */
1104 static bool
1105 validate_arglist (const_tree callexpr, ...)
1107 enum tree_code code;
1108 bool res = 0;
1109 va_list ap;
1110 const_call_expr_arg_iterator iter;
1111 const_tree arg;
1113 va_start (ap, callexpr);
1114 init_const_call_expr_arg_iterator (callexpr, &iter);
1116 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1117 tree fn = CALL_EXPR_FN (callexpr);
1118 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1120 for (unsigned argno = 1; ; ++argno)
1122 code = (enum tree_code) va_arg (ap, int);
1124 switch (code)
1126 case 0:
1127 /* This signifies an ellipsis; any further arguments are all ok. */
1128 res = true;
1129 goto end;
1130 case VOID_TYPE:
1131 /* This signifies an endlink, if no arguments remain, return
1132 true, otherwise return false. */
1133 res = !more_const_call_expr_args_p (&iter);
1134 goto end;
1135 case POINTER_TYPE:
1136 /* The actual argument must be nonnull when either the whole
1137 called function has been declared nonnull, or when the formal
1138 argument corresponding to the actual argument has been. */
1139 if (argmap
1140 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1142 arg = next_const_call_expr_arg (&iter);
1143 if (!validate_arg (arg, code) || integer_zerop (arg))
1144 goto end;
1145 break;
1147 /* FALLTHRU */
1148 default:
1149 /* If no parameters remain or the parameter's code does not
1150 match the specified code, return false. Otherwise continue
1151 checking any remaining arguments. */
1152 arg = next_const_call_expr_arg (&iter);
1153 if (!validate_arg (arg, code))
1154 goto end;
1155 break;
1159 /* We need gotos here since we can only have one VA_CLOSE in a
1160 function. */
1161 end: ;
1162 va_end (ap);
1164 BITMAP_FREE (argmap);
1166 return res;
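/* Two typical uses: validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly two pointer arguments, whereas
   validate_arglist (exp, POINTER_TYPE, 0) accepts a pointer followed by
   any number of further arguments, as __builtin_prefetch does below.  */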
1169 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1170 and the address of the save area. */
1172 static rtx
1173 expand_builtin_nonlocal_goto (tree exp)
1175 tree t_label, t_save_area;
1176 rtx r_label, r_save_area, r_fp, r_sp;
1177 rtx_insn *insn;
1179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1180 return NULL_RTX;
1182 t_label = CALL_EXPR_ARG (exp, 0);
1183 t_save_area = CALL_EXPR_ARG (exp, 1);
1185 r_label = expand_normal (t_label);
1186 r_label = convert_memory_address (Pmode, r_label);
1187 r_save_area = expand_normal (t_save_area);
1188 r_save_area = convert_memory_address (Pmode, r_save_area);
1189 /* Copy the address of the save location to a register just in case it was
1190 based on the frame pointer. */
1191 r_save_area = copy_to_reg (r_save_area);
1192 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1193 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1194 plus_constant (Pmode, r_save_area,
1195 GET_MODE_SIZE (Pmode)));
1197 crtl->has_nonlocal_goto = 1;
1199 /* ??? We no longer need to pass the static chain value, afaik. */
1200 if (targetm.have_nonlocal_goto ())
1201 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1202 else
1204 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1205 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1207 r_label = copy_to_reg (r_label);
1209 /* Restore the frame pointer and stack pointer. We must use a
1210 temporary since the setjmp buffer may be a local. */
1211 r_fp = copy_to_reg (r_fp);
1212 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1214 /* Ensure the frame pointer move is not optimized. */
1215 emit_insn (gen_blockage ());
1216 emit_clobber (hard_frame_pointer_rtx);
1217 emit_clobber (frame_pointer_rtx);
1218 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1220 /* USE of hard_frame_pointer_rtx added for consistency;
1221 not clear if really needed. */
1222 emit_use (hard_frame_pointer_rtx);
1223 emit_use (stack_pointer_rtx);
1225 /* If the architecture is using a GP register, we must
1226 conservatively assume that the target function makes use of it.
1227 The prologue of functions with nonlocal gotos must therefore
1228 initialize the GP register to the appropriate value, and we
1229 must then make sure that this value is live at the point
1230 of the jump. (Note that this doesn't necessarily apply
1231 to targets with a nonlocal_goto pattern; they are free
1232 to implement it in their own way. Note also that this is
1233 a no-op if the GP register is a global invariant.) */
1234 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1235 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1236 emit_use (pic_offset_table_rtx);
1238 emit_indirect_jump (r_label);
1241 /* Search backwards to the jump insn and mark it as a
1242 non-local goto. */
1243 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1245 if (JUMP_P (insn))
1247 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1248 break;
1250 else if (CALL_P (insn))
1251 break;
1254 return const0_rtx;
1257 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1258 (not all will be used on all machines) that was passed to __builtin_setjmp.
1259 It updates the stack pointer in that block to the current value. This is
1260 also called directly by the SJLJ exception handling code. */
1262 void
1263 expand_builtin_update_setjmp_buf (rtx buf_addr)
1265 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1266 buf_addr = convert_memory_address (Pmode, buf_addr);
1267 rtx stack_save
1268 = gen_rtx_MEM (sa_mode,
1269 memory_address
1270 (sa_mode,
1271 plus_constant (Pmode, buf_addr,
1272 2 * GET_MODE_SIZE (Pmode))));
1274 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1277 /* Expand a call to __builtin_prefetch. For a target that does not support
1278 data prefetch, evaluate the memory address argument in case it has side
1279 effects. */
1281 static void
1282 expand_builtin_prefetch (tree exp)
1284 tree arg0, arg1, arg2;
1285 int nargs;
1286 rtx op0, op1, op2;
1288 if (!validate_arglist (exp, POINTER_TYPE, 0))
1289 return;
1291 arg0 = CALL_EXPR_ARG (exp, 0);
1293 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1294 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1295 locality). */
1296 nargs = call_expr_nargs (exp);
1297 if (nargs > 1)
1298 arg1 = CALL_EXPR_ARG (exp, 1);
1299 else
1300 arg1 = integer_zero_node;
1301 if (nargs > 2)
1302 arg2 = CALL_EXPR_ARG (exp, 2);
1303 else
1304 arg2 = integer_three_node;
1306 /* Argument 0 is an address. */
1307 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1309 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1310 if (TREE_CODE (arg1) != INTEGER_CST)
1312 error ("second argument to %<__builtin_prefetch%> must be a constant");
1313 arg1 = integer_zero_node;
1315 op1 = expand_normal (arg1);
1316 /* Argument 1 must be either zero or one. */
1317 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1319 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1320 " using zero");
1321 op1 = const0_rtx;
1324 /* Argument 2 (locality) must be a compile-time constant int. */
1325 if (TREE_CODE (arg2) != INTEGER_CST)
1327 error ("third argument to %<__builtin_prefetch%> must be a constant");
1328 arg2 = integer_zero_node;
1330 op2 = expand_normal (arg2);
1331 /* Argument 2 must be 0, 1, 2, or 3. */
1332 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1334 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1335 op2 = const0_rtx;
1338 if (targetm.have_prefetch ())
1340 class expand_operand ops[3];
1342 create_address_operand (&ops[0], op0);
1343 create_integer_operand (&ops[1], INTVAL (op1));
1344 create_integer_operand (&ops[2], INTVAL (op2));
1345 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1346 return;
1349 /* Don't do anything with direct references to volatile memory, but
1350 generate code to handle other side effects. */
1351 if (!MEM_P (op0) && side_effects_p (op0))
1352 emit_insn (op0);
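/* Hence __builtin_prefetch (p, 1, 3) emits the target's prefetch pattern
   with a write hint and maximal locality, __builtin_prefetch (p) defaults
   to a read prefetch with locality 3, and on targets without a prefetch
   pattern only side effects of the address computation survive.  */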
1355 /* Get a MEM rtx for expression EXP which is the address of an operand
1356 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1357 the maximum length of the block of memory that might be accessed or
1358 NULL if unknown. */
1360 static rtx
1361 get_memory_rtx (tree exp, tree len)
1363 tree orig_exp = exp;
1364 rtx addr, mem;
1366 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1367 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1368 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1369 exp = TREE_OPERAND (exp, 0);
1371 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1372 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1374 /* Get an expression we can use to find the attributes to assign to MEM.
1375 First remove any nops. */
1376 while (CONVERT_EXPR_P (exp)
1377 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1378 exp = TREE_OPERAND (exp, 0);
1380 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1381 (as builtin stringops may alias with anything). */
1382 exp = fold_build2 (MEM_REF,
1383 build_array_type (char_type_node,
1384 build_range_type (sizetype,
1385 size_one_node, len)),
1386 exp, build_int_cst (ptr_type_node, 0));
1388 /* If the MEM_REF has no acceptable address, try to get the base object
1389 from the original address we got, and build an all-aliasing
1390 unknown-sized access to that one. */
1391 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1392 set_mem_attributes (mem, exp, 0);
1393 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1394 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1395 0))))
1397 exp = build_fold_addr_expr (exp);
1398 exp = fold_build2 (MEM_REF,
1399 build_array_type (char_type_node,
1400 build_range_type (sizetype,
1401 size_zero_node,
1402 NULL)),
1403 exp, build_int_cst (ptr_type_node, 0));
1404 set_mem_attributes (mem, exp, 0);
1406 set_mem_alias_set (mem, 0);
1407 return mem;
1410 /* Built-in functions to perform an untyped call and return. */
1412 #define apply_args_mode \
1413 (this_target_builtins->x_apply_args_mode)
1414 #define apply_result_mode \
1415 (this_target_builtins->x_apply_result_mode)
1417 /* Return the size required for the block returned by __builtin_apply_args,
1418 and initialize apply_args_mode. */
1420 static int
1421 apply_args_size (void)
1423 static int size = -1;
1424 int align;
1425 unsigned int regno;
1427 /* The values computed by this function never change. */
1428 if (size < 0)
1430 /* The first value is the incoming arg-pointer. */
1431 size = GET_MODE_SIZE (Pmode);
1433 /* The second value is the structure value address unless this is
1434 passed as an "invisible" first argument. */
1435 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1436 size += GET_MODE_SIZE (Pmode);
1438 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1439 if (FUNCTION_ARG_REGNO_P (regno))
1441 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1443 gcc_assert (mode != VOIDmode);
1445 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1446 if (size % align != 0)
1447 size = CEIL (size, align) * align;
1448 size += GET_MODE_SIZE (mode);
1449 apply_args_mode[regno] = mode;
1451 else
1453 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1456 return size;
1459 /* Return the size required for the block returned by __builtin_apply,
1460 and initialize apply_result_mode. */
1462 static int
1463 apply_result_size (void)
1465 static int size = -1;
1466 int align, regno;
1468 /* The values computed by this function never change. */
1469 if (size < 0)
1471 size = 0;
1473 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1474 if (targetm.calls.function_value_regno_p (regno))
1476 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1478 gcc_assert (mode != VOIDmode);
1480 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1481 if (size % align != 0)
1482 size = CEIL (size, align) * align;
1483 size += GET_MODE_SIZE (mode);
1484 apply_result_mode[regno] = mode;
1486 else
1487 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1489 /* Allow targets that use untyped_call and untyped_return to override
1490 the size so that machine-specific information can be stored here. */
1491 #ifdef APPLY_RESULT_SIZE
1492 size = APPLY_RESULT_SIZE;
1493 #endif
1495 return size;
1498 /* Create a vector describing the result block RESULT. If SAVEP is true,
1499 the result block is used to save the values; otherwise it is used to
1500 restore the values. */
1502 static rtx
1503 result_vector (int savep, rtx result)
1505 int regno, size, align, nelts;
1506 fixed_size_mode mode;
1507 rtx reg, mem;
1508 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1510 size = nelts = 0;
1511 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1512 if ((mode = apply_result_mode[regno]) != VOIDmode)
1514 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1515 if (size % align != 0)
1516 size = CEIL (size, align) * align;
1517 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1518 mem = adjust_address (result, mode, size);
1519 savevec[nelts++] = (savep
1520 ? gen_rtx_SET (mem, reg)
1521 : gen_rtx_SET (reg, mem));
1522 size += GET_MODE_SIZE (mode);
1524 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1527 /* Save the state required to perform an untyped call with the same
1528 arguments as were passed to the current function. */
1530 static rtx
1531 expand_builtin_apply_args_1 (void)
1533 rtx registers, tem;
1534 int size, align, regno;
1535 fixed_size_mode mode;
1536 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1538 /* Create a block where the arg-pointer, structure value address,
1539 and argument registers can be saved. */
1540 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1542 /* Walk past the arg-pointer and structure value address. */
1543 size = GET_MODE_SIZE (Pmode);
1544 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1545 size += GET_MODE_SIZE (Pmode);
1547 /* Save each register used in calling a function to the block. */
1548 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1549 if ((mode = apply_args_mode[regno]) != VOIDmode)
1551 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1552 if (size % align != 0)
1553 size = CEIL (size, align) * align;
1555 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1557 emit_move_insn (adjust_address (registers, mode, size), tem);
1558 size += GET_MODE_SIZE (mode);
1561 /* Save the arg pointer to the block. */
1562 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1563 /* We need the pointer as the caller actually passed the arguments to us, not
1564 as we might have pretended they were passed. Make sure it's a valid
1565 operand, as emit_move_insn isn't expected to handle a PLUS. */
1566 if (STACK_GROWS_DOWNWARD)
1567 tem
1568 = force_operand (plus_constant (Pmode, tem,
1569 crtl->args.pretend_args_size),
1570 NULL_RTX);
1571 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1573 size = GET_MODE_SIZE (Pmode);
1575 /* Save the structure value address unless this is passed as an
1576 "invisible" first argument. */
1577 if (struct_incoming_value)
1578 emit_move_insn (adjust_address (registers, Pmode, size),
1579 copy_to_reg (struct_incoming_value));
1581 /* Return the address of the block. */
1582 return copy_addr_to_reg (XEXP (registers, 0));
1585 /* __builtin_apply_args returns block of memory allocated on
1586 the stack into which is stored the arg pointer, structure
1587 value address, static chain, and all the registers that might
1588 possibly be used in performing a function call. The code is
1589 moved to the start of the function so the incoming values are
1590 saved. */
1592 static rtx
1593 expand_builtin_apply_args (void)
1595 /* Don't do __builtin_apply_args more than once in a function.
1596 Save the result of the first call and reuse it. */
1597 if (apply_args_value != 0)
1598 return apply_args_value;
1600 /* When this function is called, it means that registers must be
1601 saved on entry to this function. So we migrate the
1602 call to the first insn of this function. */
1603 rtx temp;
1605 start_sequence ();
1606 temp = expand_builtin_apply_args_1 ();
1607 rtx_insn *seq = get_insns ();
1608 end_sequence ();
1610 apply_args_value = temp;
1612 /* Put the insns after the NOTE that starts the function.
1613 If this is inside a start_sequence, make the outer-level insn
1614 chain current, so the code is placed at the start of the
1615 function. If internal_arg_pointer is a non-virtual pseudo,
1616 it needs to be placed after the function that initializes
1617 that pseudo. */
1618 push_topmost_sequence ();
1619 if (REG_P (crtl->args.internal_arg_pointer)
1620 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1621 emit_insn_before (seq, parm_birth_insn);
1622 else
1623 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1624 pop_topmost_sequence ();
1625 return temp;
1629 /* Perform an untyped call and save the state required to perform an
1630 untyped return of whatever value was returned by the given function. */
1632 static rtx
1633 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1635 int size, align, regno;
1636 fixed_size_mode mode;
1637 rtx incoming_args, result, reg, dest, src;
1638 rtx_call_insn *call_insn;
1639 rtx old_stack_level = 0;
1640 rtx call_fusage = 0;
1641 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1643 arguments = convert_memory_address (Pmode, arguments);
1645 /* Create a block where the return registers can be saved. */
1646 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1648 /* Fetch the arg pointer from the ARGUMENTS block. */
1649 incoming_args = gen_reg_rtx (Pmode);
1650 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1651 if (!STACK_GROWS_DOWNWARD)
1652 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1653 incoming_args, 0, OPTAB_LIB_WIDEN);
1655 /* Push a new argument block and copy the arguments. Do not allow
1656 the (potential) memcpy call below to interfere with our stack
1657 manipulations. */
1658 do_pending_stack_adjust ();
1659 NO_DEFER_POP;
1661 /* Save the stack with nonlocal if available. */
1662 if (targetm.have_save_stack_nonlocal ())
1663 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1664 else
1665 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1667 /* Allocate a block of memory onto the stack and copy the memory
1668 arguments to the outgoing arguments address. We can pass TRUE
1669 as the 4th argument because we just saved the stack pointer
1670 and will restore it right after the call. */
1671 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1673 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1674 may have already set current_function_calls_alloca to true.
1675 current_function_calls_alloca won't be set if argsize is zero,
1676 so we have to guarantee need_drap is true here. */
1677 if (SUPPORTS_STACK_ALIGNMENT)
1678 crtl->need_drap = true;
1680 dest = virtual_outgoing_args_rtx;
1681 if (!STACK_GROWS_DOWNWARD)
1683 if (CONST_INT_P (argsize))
1684 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1685 else
1686 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1688 dest = gen_rtx_MEM (BLKmode, dest);
1689 set_mem_align (dest, PARM_BOUNDARY);
1690 src = gen_rtx_MEM (BLKmode, incoming_args);
1691 set_mem_align (src, PARM_BOUNDARY);
1692 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1694 /* Refer to the argument block. */
1695 apply_args_size ();
1696 arguments = gen_rtx_MEM (BLKmode, arguments);
1697 set_mem_align (arguments, PARM_BOUNDARY);
1699 /* Walk past the arg-pointer and structure value address. */
1700 size = GET_MODE_SIZE (Pmode);
1701 if (struct_value)
1702 size += GET_MODE_SIZE (Pmode);
1704 /* Restore each of the registers previously saved. Make USE insns
1705 for each of these registers for use in making the call. */
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_args_mode[regno]) != VOIDmode)
1709 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1710 if (size % align != 0)
1711 size = CEIL (size, align) * align;
1712 reg = gen_rtx_REG (mode, regno);
1713 emit_move_insn (reg, adjust_address (arguments, mode, size));
1714 use_reg (&call_fusage, reg);
1715 size += GET_MODE_SIZE (mode);
1718 /* Restore the structure value address unless this is passed as an
1719 "invisible" first argument. */
1720 size = GET_MODE_SIZE (Pmode);
1721 if (struct_value)
1723 rtx value = gen_reg_rtx (Pmode);
1724 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1725 emit_move_insn (struct_value, value);
1726 if (REG_P (struct_value))
1727 use_reg (&call_fusage, struct_value);
1730 /* All arguments and registers used for the call are set up by now! */
1731 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1733 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1734 and we don't want to load it into a register as an optimization,
1735 because prepare_call_address already did it if it should be done. */
1736 if (GET_CODE (function) != SYMBOL_REF)
1737 function = memory_address (FUNCTION_MODE, function);
1739 /* Generate the actual call instruction and save the return value. */
1740 if (targetm.have_untyped_call ())
1742 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1743 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1744 result_vector (1, result));
1745 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1746 if (CALL_P (insn))
1747 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1748 emit_insn (seq);
1750 else if (targetm.have_call_value ())
1752 rtx valreg = 0;
1754 /* Locate the unique return register. It is not possible to
1755 express a call that sets more than one return register using
1756 call_value; use untyped_call for that. In fact, untyped_call
1757 only needs to save the return registers in the given block. */
1758 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1759 if ((mode = apply_result_mode[regno]) != VOIDmode)
1761 gcc_assert (!valreg); /* have_untyped_call required. */
1763 valreg = gen_rtx_REG (mode, regno);
1766 emit_insn (targetm.gen_call_value (valreg,
1767 gen_rtx_MEM (FUNCTION_MODE, function),
1768 const0_rtx, NULL_RTX, const0_rtx));
1770 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1772 else
1773 gcc_unreachable ();
1775 /* Find the CALL insn we just emitted, and attach the register usage
1776 information. */
1777 call_insn = last_call_insn ();
1778 add_function_usage_to (call_insn, call_fusage);
1780 /* Restore the stack. */
1781 if (targetm.have_save_stack_nonlocal ())
1782 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1783 else
1784 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1785 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1787 OK_DEFER_POP;
1789 /* Return the address of the result block. */
1790 result = copy_addr_to_reg (XEXP (result, 0));
1791 return convert_memory_address (ptr_mode, result);
1794 /* Perform an untyped return. */
1796 static void
1797 expand_builtin_return (rtx result)
1799 int size, align, regno;
1800 fixed_size_mode mode;
1801 rtx reg;
1802 rtx_insn *call_fusage = 0;
1804 result = convert_memory_address (Pmode, result);
1806 apply_result_size ();
1807 result = gen_rtx_MEM (BLKmode, result);
1809 if (targetm.have_untyped_return ())
1811 rtx vector = result_vector (0, result);
1812 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1813 emit_barrier ();
1814 return;
1817 /* Restore the return value and note that each value is used. */
1818 size = 0;
1819 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1820 if ((mode = apply_result_mode[regno]) != VOIDmode)
1822 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1823 if (size % align != 0)
1824 size = CEIL (size, align) * align;
1825 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1826 emit_move_insn (reg, adjust_address (result, mode, size));
1828 push_to_sequence (call_fusage);
1829 emit_use (reg);
1830 call_fusage = get_insns ();
1831 end_sequence ();
1832 size += GET_MODE_SIZE (mode);
1835 /* Put the USE insns before the return. */
1836 emit_insn (call_fusage);
1838 /* Return whatever values were restored by jumping directly to the end
1839 of the function. */
1840 expand_naked_return ();
1843 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1845 static enum type_class
1846 type_to_class (tree type)
1848 switch (TREE_CODE (type))
1850 case VOID_TYPE: return void_type_class;
1851 case INTEGER_TYPE: return integer_type_class;
1852 case ENUMERAL_TYPE: return enumeral_type_class;
1853 case BOOLEAN_TYPE: return boolean_type_class;
1854 case POINTER_TYPE: return pointer_type_class;
1855 case REFERENCE_TYPE: return reference_type_class;
1856 case OFFSET_TYPE: return offset_type_class;
1857 case REAL_TYPE: return real_type_class;
1858 case COMPLEX_TYPE: return complex_type_class;
1859 case FUNCTION_TYPE: return function_type_class;
1860 case METHOD_TYPE: return method_type_class;
1861 case RECORD_TYPE: return record_type_class;
1862 case UNION_TYPE:
1863 case QUAL_UNION_TYPE: return union_type_class;
1864 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1865 ? string_type_class : array_type_class);
1866 case LANG_TYPE: return lang_type_class;
1867 case OPAQUE_TYPE: return opaque_type_class;
1868 default: return no_type_class;
1872 /* Expand a call EXP to __builtin_classify_type. */
1874 static rtx
1875 expand_builtin_classify_type (tree exp)
1877 if (call_expr_nargs (exp))
1878 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1879 return GEN_INT (no_type_class);
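/* As a quick illustration of the mapping above: __builtin_classify_type (0)
   evaluates to integer_type_class and __builtin_classify_type (0.0) to
   real_type_class, while a CALL_EXPR with no arguments is expanded to
   no_type_class.  */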
1882 /* This helper macro, meant to be used in mathfn_built_in below, determines
1883 which among a set of builtin math functions is appropriate for a given type
1884 mode. The `F' (float) and `L' (long double) are automatically generated
1885 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1886 types, there are additional types that are considered with 'F32', 'F64',
1887 'F128', etc. suffixes. */
1888 #define CASE_MATHFN(MATHFN) \
1889 CASE_CFN_##MATHFN: \
1890 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1891 fcodel = BUILT_IN_##MATHFN##L ; break;
1892 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1893 types. */
1894 #define CASE_MATHFN_FLOATN(MATHFN) \
1895 CASE_CFN_##MATHFN: \
1896 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1897 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1898 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1899 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1900 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1901 break;
1902 /* Similar to above, but appends _R after any F/L suffix. */
1903 #define CASE_MATHFN_REENT(MATHFN) \
1904 case CFN_BUILT_IN_##MATHFN##_R: \
1905 case CFN_BUILT_IN_##MATHFN##F_R: \
1906 case CFN_BUILT_IN_##MATHFN##L_R: \
1907 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1908 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1910 /* Return a function equivalent to FN but operating on floating-point
1911 values of type TYPE, or END_BUILTINS if no such function exists.
1912 This is purely an operation on function codes; it does not guarantee
1913 that the target actually has an implementation of the function. */
1915 static built_in_function
1916 mathfn_built_in_2 (tree type, combined_fn fn)
1918 tree mtype;
1919 built_in_function fcode, fcodef, fcodel;
1920 built_in_function fcodef16 = END_BUILTINS;
1921 built_in_function fcodef32 = END_BUILTINS;
1922 built_in_function fcodef64 = END_BUILTINS;
1923 built_in_function fcodef128 = END_BUILTINS;
1924 built_in_function fcodef32x = END_BUILTINS;
1925 built_in_function fcodef64x = END_BUILTINS;
1926 built_in_function fcodef128x = END_BUILTINS;
1928 switch (fn)
1930 #define SEQ_OF_CASE_MATHFN \
1931 CASE_MATHFN (ACOS) \
1932 CASE_MATHFN (ACOSH) \
1933 CASE_MATHFN (ASIN) \
1934 CASE_MATHFN (ASINH) \
1935 CASE_MATHFN (ATAN) \
1936 CASE_MATHFN (ATAN2) \
1937 CASE_MATHFN (ATANH) \
1938 CASE_MATHFN (CBRT) \
1939 CASE_MATHFN_FLOATN (CEIL) \
1940 CASE_MATHFN (CEXPI) \
1941 CASE_MATHFN_FLOATN (COPYSIGN) \
1942 CASE_MATHFN (COS) \
1943 CASE_MATHFN (COSH) \
1944 CASE_MATHFN (DREM) \
1945 CASE_MATHFN (ERF) \
1946 CASE_MATHFN (ERFC) \
1947 CASE_MATHFN (EXP) \
1948 CASE_MATHFN (EXP10) \
1949 CASE_MATHFN (EXP2) \
1950 CASE_MATHFN (EXPM1) \
1951 CASE_MATHFN (FABS) \
1952 CASE_MATHFN (FDIM) \
1953 CASE_MATHFN_FLOATN (FLOOR) \
1954 CASE_MATHFN_FLOATN (FMA) \
1955 CASE_MATHFN_FLOATN (FMAX) \
1956 CASE_MATHFN_FLOATN (FMIN) \
1957 CASE_MATHFN (FMOD) \
1958 CASE_MATHFN (FREXP) \
1959 CASE_MATHFN (GAMMA) \
1960 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1961 CASE_MATHFN (HUGE_VAL) \
1962 CASE_MATHFN (HYPOT) \
1963 CASE_MATHFN (ILOGB) \
1964 CASE_MATHFN (ICEIL) \
1965 CASE_MATHFN (IFLOOR) \
1966 CASE_MATHFN (INF) \
1967 CASE_MATHFN (IRINT) \
1968 CASE_MATHFN (IROUND) \
1969 CASE_MATHFN (ISINF) \
1970 CASE_MATHFN (J0) \
1971 CASE_MATHFN (J1) \
1972 CASE_MATHFN (JN) \
1973 CASE_MATHFN (LCEIL) \
1974 CASE_MATHFN (LDEXP) \
1975 CASE_MATHFN (LFLOOR) \
1976 CASE_MATHFN (LGAMMA) \
1977 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1978 CASE_MATHFN (LLCEIL) \
1979 CASE_MATHFN (LLFLOOR) \
1980 CASE_MATHFN (LLRINT) \
1981 CASE_MATHFN (LLROUND) \
1982 CASE_MATHFN (LOG) \
1983 CASE_MATHFN (LOG10) \
1984 CASE_MATHFN (LOG1P) \
1985 CASE_MATHFN (LOG2) \
1986 CASE_MATHFN (LOGB) \
1987 CASE_MATHFN (LRINT) \
1988 CASE_MATHFN (LROUND) \
1989 CASE_MATHFN (MODF) \
1990 CASE_MATHFN (NAN) \
1991 CASE_MATHFN (NANS) \
1992 CASE_MATHFN_FLOATN (NEARBYINT) \
1993 CASE_MATHFN (NEXTAFTER) \
1994 CASE_MATHFN (NEXTTOWARD) \
1995 CASE_MATHFN (POW) \
1996 CASE_MATHFN (POWI) \
1997 CASE_MATHFN (POW10) \
1998 CASE_MATHFN (REMAINDER) \
1999 CASE_MATHFN (REMQUO) \
2000 CASE_MATHFN_FLOATN (RINT) \
2001 CASE_MATHFN_FLOATN (ROUND) \
2002 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2003 CASE_MATHFN (SCALB) \
2004 CASE_MATHFN (SCALBLN) \
2005 CASE_MATHFN (SCALBN) \
2006 CASE_MATHFN (SIGNBIT) \
2007 CASE_MATHFN (SIGNIFICAND) \
2008 CASE_MATHFN (SIN) \
2009 CASE_MATHFN (SINCOS) \
2010 CASE_MATHFN (SINH) \
2011 CASE_MATHFN_FLOATN (SQRT) \
2012 CASE_MATHFN (TAN) \
2013 CASE_MATHFN (TANH) \
2014 CASE_MATHFN (TGAMMA) \
2015 CASE_MATHFN_FLOATN (TRUNC) \
2016 CASE_MATHFN (Y0) \
2017 CASE_MATHFN (Y1) \
2018 CASE_MATHFN (YN)
2020 SEQ_OF_CASE_MATHFN
2022 default:
2023 return END_BUILTINS;
2026 mtype = TYPE_MAIN_VARIANT (type);
2027 if (mtype == double_type_node)
2028 return fcode;
2029 else if (mtype == float_type_node)
2030 return fcodef;
2031 else if (mtype == long_double_type_node)
2032 return fcodel;
2033 else if (mtype == float16_type_node)
2034 return fcodef16;
2035 else if (mtype == float32_type_node)
2036 return fcodef32;
2037 else if (mtype == float64_type_node)
2038 return fcodef64;
2039 else if (mtype == float128_type_node)
2040 return fcodef128;
2041 else if (mtype == float32x_type_node)
2042 return fcodef32x;
2043 else if (mtype == float64x_type_node)
2044 return fcodef64x;
2045 else if (mtype == float128x_type_node)
2046 return fcodef128x;
2047 else
2048 return END_BUILTINS;
2051 #undef CASE_MATHFN
2052 #undef CASE_MATHFN_FLOATN
2053 #undef CASE_MATHFN_REENT
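/* For example, given the table above, mathfn_built_in_2 (float_type_node,
   CFN_SQRT) yields BUILT_IN_SQRTF, whereas mathfn_built_in_2
   (float128_type_node, CFN_SIN) yields END_BUILTINS, because SIN is listed
   with CASE_MATHFN rather than CASE_MATHFN_FLOATN and so has no _Float128
   variant.  */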
2055 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2056 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2057 otherwise use the explicit declaration. If we can't do the conversion,
2058 return null. */
2060 static tree
2061 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2063 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2064 if (fcode2 == END_BUILTINS)
2065 return NULL_TREE;
2067 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2068 return NULL_TREE;
2070 return builtin_decl_explicit (fcode2);
2073 /* Like mathfn_built_in_1, but always use the implicit array. */
2075 tree
2076 mathfn_built_in (tree type, combined_fn fn)
2078 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2081 /* Like mathfn_built_in_1, but take a built_in_function and
2082 always use the implicit array. */
2084 tree
2085 mathfn_built_in (tree type, enum built_in_function fn)
2087 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2090 /* Return the type associated with a built in function, i.e., the one
2091 to be passed to mathfn_built_in to get the type-specific
2092 function. */
2094 tree
2095 mathfn_built_in_type (combined_fn fn)
2097 #define CASE_MATHFN(MATHFN) \
2098 case CFN_BUILT_IN_##MATHFN: \
2099 return double_type_node; \
2100 case CFN_BUILT_IN_##MATHFN##F: \
2101 return float_type_node; \
2102 case CFN_BUILT_IN_##MATHFN##L: \
2103 return long_double_type_node;
2105 #define CASE_MATHFN_FLOATN(MATHFN) \
2106 CASE_MATHFN(MATHFN) \
2107 case CFN_BUILT_IN_##MATHFN##F16: \
2108 return float16_type_node; \
2109 case CFN_BUILT_IN_##MATHFN##F32: \
2110 return float32_type_node; \
2111 case CFN_BUILT_IN_##MATHFN##F64: \
2112 return float64_type_node; \
2113 case CFN_BUILT_IN_##MATHFN##F128: \
2114 return float128_type_node; \
2115 case CFN_BUILT_IN_##MATHFN##F32X: \
2116 return float32x_type_node; \
2117 case CFN_BUILT_IN_##MATHFN##F64X: \
2118 return float64x_type_node; \
2119 case CFN_BUILT_IN_##MATHFN##F128X: \
2120 return float128x_type_node;
2122 /* Similar to above, but appends _R after any F/L suffix. */
2123 #define CASE_MATHFN_REENT(MATHFN) \
2124 case CFN_BUILT_IN_##MATHFN##_R: \
2125 return double_type_node; \
2126 case CFN_BUILT_IN_##MATHFN##F_R: \
2127 return float_type_node; \
2128 case CFN_BUILT_IN_##MATHFN##L_R: \
2129 return long_double_type_node;
2131 switch (fn)
2133 SEQ_OF_CASE_MATHFN
2135 default:
2136 return NULL_TREE;
2139 #undef CASE_MATHFN
2140 #undef CASE_MATHFN_FLOATN
2141 #undef CASE_MATHFN_REENT
2142 #undef SEQ_OF_CASE_MATHFN
2145 /* Check whether there is an internal function associated with function FN
2146 and return type RETURN_TYPE. Return the function if so, otherwise return
2147 IFN_LAST.
2149 Note that this function only tests whether the function is defined in
2150 internals.def, not whether it is actually available on the target. */
2152 static internal_fn
2153 associated_internal_fn (built_in_function fn, tree return_type)
2155 switch (fn)
2157 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2158 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2159 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2161 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2162 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2163 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2164 #include "internal-fn.def"
2166 CASE_FLT_FN (BUILT_IN_POW10):
2167 return IFN_EXP10;
2169 CASE_FLT_FN (BUILT_IN_DREM):
2170 return IFN_REMAINDER;
2172 CASE_FLT_FN (BUILT_IN_SCALBN):
2173 CASE_FLT_FN (BUILT_IN_SCALBLN):
2174 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2175 return IFN_LDEXP;
2176 return IFN_LAST;
2178 default:
2179 return IFN_LAST;
2183 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2184 return its code, otherwise return IFN_LAST. Note that this function
2185 only tests whether the function is defined in internals.def, not whether
2186 it is actually available on the target. */
2188 internal_fn
2189 associated_internal_fn (tree fndecl)
2191 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2192 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2193 TREE_TYPE (TREE_TYPE (fndecl)));
2196 /* Check whether there is an internal function associated with function CFN
2197 and return type RETURN_TYPE. Return the function if so, otherwise return
2198 IFN_LAST.
2200 Note that this function only tests whether the function is defined in
2201 internals.def, not whether it is actually available on the target. */
2203 internal_fn
2204 associated_internal_fn (combined_fn cfn, tree return_type)
2206 if (internal_fn_p (cfn))
2207 return as_internal_fn (cfn);
2208 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2211 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2212 on the current target by a call to an internal function, return the
2213 code of that internal function, otherwise return IFN_LAST. The caller
2214 is responsible for ensuring that any side-effects of the built-in
2215 call are dealt with correctly. E.g. if CALL sets errno, the caller
2216 must decide that the errno result isn't needed or make it available
2217 in some other way. */
2219 internal_fn
2220 replacement_internal_fn (gcall *call)
2222 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2224 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2225 if (ifn != IFN_LAST)
2227 tree_pair types = direct_internal_fn_types (ifn, call);
2228 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2229 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2230 return ifn;
2233 return IFN_LAST;
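/* For instance, a GIMPLE call to __builtin_sqrtf may be reported as
   replaceable by IFN_SQRT when the target implements the corresponding
   optab for SFmode; as noted above, the caller still has to account for
   errno if -fmath-errno semantics are required.  */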
2236 /* Expand a call to the builtin trinary math functions (fma).
2237 Return NULL_RTX if a normal call should be emitted rather than expanding the
2238 function in-line. EXP is the expression that is a call to the builtin
2239 function; if convenient, the result should be placed in TARGET.
2240 SUBTARGET may be used as the target for computing one of EXP's
2241 operands. */
2243 static rtx
2244 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2246 optab builtin_optab;
2247 rtx op0, op1, op2, result;
2248 rtx_insn *insns;
2249 tree fndecl = get_callee_fndecl (exp);
2250 tree arg0, arg1, arg2;
2251 machine_mode mode;
2253 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2254 return NULL_RTX;
2256 arg0 = CALL_EXPR_ARG (exp, 0);
2257 arg1 = CALL_EXPR_ARG (exp, 1);
2258 arg2 = CALL_EXPR_ARG (exp, 2);
2260 switch (DECL_FUNCTION_CODE (fndecl))
2262 CASE_FLT_FN (BUILT_IN_FMA):
2263 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2264 builtin_optab = fma_optab; break;
2265 default:
2266 gcc_unreachable ();
2269 /* Make a suitable register to place result in. */
2270 mode = TYPE_MODE (TREE_TYPE (exp));
2272 /* Before working hard, check whether the instruction is available. */
2273 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2274 return NULL_RTX;
2276 result = gen_reg_rtx (mode);
2278 /* Always stabilize the argument list. */
2279 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2280 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2281 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2283 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2284 op1 = expand_normal (arg1);
2285 op2 = expand_normal (arg2);
2287 start_sequence ();
2289 /* Compute into RESULT.
2290 Set RESULT to wherever the result comes back. */
2291 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2292 result, 0);
2294 /* If we were unable to expand via the builtin, stop the sequence
2295 (without outputting the insns) and call to the library function
2296 with the stabilized argument list. */
2297 if (result == 0)
2299 end_sequence ();
2300 return expand_call (exp, target, target == const0_rtx);
2303 /* Output the entire sequence. */
2304 insns = get_insns ();
2305 end_sequence ();
2306 emit_insn (insns);
2308 return result;
2311 /* Expand a call to the builtin sin and cos math functions.
2312 Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function; if convenient, the result should be placed in TARGET.
2315 SUBTARGET may be used as the target for computing one of EXP's
2316 operands. */
2318 static rtx
2319 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2321 optab builtin_optab;
2322 rtx op0;
2323 rtx_insn *insns;
2324 tree fndecl = get_callee_fndecl (exp);
2325 machine_mode mode;
2326 tree arg;
2328 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2329 return NULL_RTX;
2331 arg = CALL_EXPR_ARG (exp, 0);
2333 switch (DECL_FUNCTION_CODE (fndecl))
2335 CASE_FLT_FN (BUILT_IN_SIN):
2336 CASE_FLT_FN (BUILT_IN_COS):
2337 builtin_optab = sincos_optab; break;
2338 default:
2339 gcc_unreachable ();
2342 /* Make a suitable register to place result in. */
2343 mode = TYPE_MODE (TREE_TYPE (exp));
2345 /* Check if the sincos insn is available, otherwise fall back
2346 to the sin or cos insn. */
2347 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2348 switch (DECL_FUNCTION_CODE (fndecl))
2350 CASE_FLT_FN (BUILT_IN_SIN):
2351 builtin_optab = sin_optab; break;
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = cos_optab; break;
2354 default:
2355 gcc_unreachable ();
2358 /* Before working hard, check whether the instruction is available. */
2359 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2361 rtx result = gen_reg_rtx (mode);
2363 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2364 need to expand the argument again. This way, we will not perform
2365 side-effects more than once. */
2366 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2368 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2370 start_sequence ();
2372 /* Compute into RESULT.
2373 Set RESULT to wherever the result comes back. */
2374 if (builtin_optab == sincos_optab)
2376 int ok;
2378 switch (DECL_FUNCTION_CODE (fndecl))
2380 CASE_FLT_FN (BUILT_IN_SIN):
2381 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2382 break;
2383 CASE_FLT_FN (BUILT_IN_COS):
2384 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2385 break;
2386 default:
2387 gcc_unreachable ();
2389 gcc_assert (ok);
2391 else
2392 result = expand_unop (mode, builtin_optab, op0, result, 0);
2394 if (result != 0)
2396 /* Output the entire sequence. */
2397 insns = get_insns ();
2398 end_sequence ();
2399 emit_insn (insns);
2400 return result;
2403 /* If we were unable to expand via the builtin, stop the sequence
2404 (without outputting the insns) and call to the library function
2405 with the stabilized argument list. */
2406 end_sequence ();
2409 return expand_call (exp, target, target == const0_rtx);
2412 /* Given an interclass math builtin decl FNDECL and its argument ARG
2413 return an RTL instruction code that implements the functionality.
2414 If that isn't possible or available return CODE_FOR_nothing. */
2416 static enum insn_code
2417 interclass_mathfn_icode (tree arg, tree fndecl)
2419 bool errno_set = false;
2420 optab builtin_optab = unknown_optab;
2421 machine_mode mode;
2423 switch (DECL_FUNCTION_CODE (fndecl))
2425 CASE_FLT_FN (BUILT_IN_ILOGB):
2426 errno_set = true; builtin_optab = ilogb_optab; break;
2427 CASE_FLT_FN (BUILT_IN_ISINF):
2428 builtin_optab = isinf_optab; break;
2429 case BUILT_IN_ISNORMAL:
2430 case BUILT_IN_ISFINITE:
2431 CASE_FLT_FN (BUILT_IN_FINITE):
2432 case BUILT_IN_FINITED32:
2433 case BUILT_IN_FINITED64:
2434 case BUILT_IN_FINITED128:
2435 case BUILT_IN_ISINFD32:
2436 case BUILT_IN_ISINFD64:
2437 case BUILT_IN_ISINFD128:
2438 /* These builtins have no optabs (yet). */
2439 break;
2440 default:
2441 gcc_unreachable ();
2444 /* There's no easy way to detect the case we need to set EDOM. */
2445 if (flag_errno_math && errno_set)
2446 return CODE_FOR_nothing;
2448 /* Optab mode depends on the mode of the input argument. */
2449 mode = TYPE_MODE (TREE_TYPE (arg));
2451 if (builtin_optab)
2452 return optab_handler (builtin_optab, mode);
2453 return CODE_FOR_nothing;
2456 /* Expand a call to one of the builtin math functions that operate on
2457 a floating-point argument and output an integer result (ilogb, isinf,
2458 isnan, etc).
2459 Return 0 if a normal call should be emitted rather than expanding the
2460 function in-line. EXP is the expression that is a call to the builtin
2461 function; if convenient, the result should be placed in TARGET. */
2463 static rtx
2464 expand_builtin_interclass_mathfn (tree exp, rtx target)
2466 enum insn_code icode = CODE_FOR_nothing;
2467 rtx op0;
2468 tree fndecl = get_callee_fndecl (exp);
2469 machine_mode mode;
2470 tree arg;
2472 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2473 return NULL_RTX;
2475 arg = CALL_EXPR_ARG (exp, 0);
2476 icode = interclass_mathfn_icode (arg, fndecl);
2477 mode = TYPE_MODE (TREE_TYPE (arg));
2479 if (icode != CODE_FOR_nothing)
2481 class expand_operand ops[1];
2482 rtx_insn *last = get_last_insn ();
2483 tree orig_arg = arg;
2485 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2486 need to expand the argument again. This way, we will not perform
2487 side-effects more than once. */
2488 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2490 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2492 if (mode != GET_MODE (op0))
2493 op0 = convert_to_mode (mode, op0, 0);
2495 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2496 if (maybe_legitimize_operands (icode, 0, 1, ops)
2497 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2498 return ops[0].value;
2500 delete_insns_since (last);
2501 CALL_EXPR_ARG (exp, 0) = orig_arg;
2504 return NULL_RTX;
2507 /* Expand a call to the builtin sincos math function.
2508 Return NULL_RTX if a normal call should be emitted rather than expanding the
2509 function in-line. EXP is the expression that is a call to the builtin
2510 function. */
2512 static rtx
2513 expand_builtin_sincos (tree exp)
2515 rtx op0, op1, op2, target1, target2;
2516 machine_mode mode;
2517 tree arg, sinp, cosp;
2518 int result;
2519 location_t loc = EXPR_LOCATION (exp);
2520 tree alias_type, alias_off;
2522 if (!validate_arglist (exp, REAL_TYPE,
2523 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2524 return NULL_RTX;
2526 arg = CALL_EXPR_ARG (exp, 0);
2527 sinp = CALL_EXPR_ARG (exp, 1);
2528 cosp = CALL_EXPR_ARG (exp, 2);
2530 /* Make a suitable register to place result in. */
2531 mode = TYPE_MODE (TREE_TYPE (arg));
2533 /* Check if sincos insn is available, otherwise emit the call. */
2534 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2535 return NULL_RTX;
2537 target1 = gen_reg_rtx (mode);
2538 target2 = gen_reg_rtx (mode);
2540 op0 = expand_normal (arg);
2541 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2542 alias_off = build_int_cst (alias_type, 0);
2543 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2544 sinp, alias_off));
2545 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2546 cosp, alias_off));
2548 /* Compute into target1 and target2.
2549 Set TARGET to wherever the result comes back. */
2550 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2551 gcc_assert (result);
2553 /* Move target1 and target2 to the memory locations indicated
2554 by op1 and op2. */
2555 emit_move_insn (op1, target1);
2556 emit_move_insn (op2, target2);
2558 return const0_rtx;
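/* Illustration: a call such as sincos (x, &s, &c) is expanded here into a
   single sincos-pattern insn that computes both results at once; the two
   values are then stored through the SINP and COSP pointers, and the
   builtin's void result is represented by const0_rtx.  */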
2561 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2562 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2563 static rtx
2564 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2566 if (!validate_arglist (exp, VOID_TYPE))
2567 return NULL_RTX;
2569 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2570 if (icode == CODE_FOR_nothing)
2571 return NULL_RTX;
2573 if (target == 0
2574 || GET_MODE (target) != target_mode
2575 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2576 target = gen_reg_rtx (target_mode);
2578 rtx pat = GEN_FCN (icode) (target);
2579 if (!pat)
2580 return NULL_RTX;
2581 emit_insn (pat);
2583 return target;
2586 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2587 fenv.h), returning the result and setting it in TARGET. Otherwise return
2588 NULL_RTX on failure. */
2589 static rtx
2590 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2591 machine_mode target_mode, optab op_optab)
2593 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2594 return NULL_RTX;
2595 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2597 insn_code icode = direct_optab_handler (op_optab, SImode);
2598 if (icode == CODE_FOR_nothing)
2599 return NULL_RTX;
2601 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2602 return NULL_RTX;
2604 if (target == 0
2605 || GET_MODE (target) != target_mode
2606 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2607 target = gen_reg_rtx (target_mode);
2609 rtx pat = GEN_FCN (icode) (target, op0);
2610 if (!pat)
2611 return NULL_RTX;
2612 emit_insn (pat);
2614 return target;
2617 /* Expand a call to the internal cexpi builtin to the sincos math function.
2618 EXP is the expression that is a call to the builtin function; if convenient,
2619 the result should be placed in TARGET. */
2621 static rtx
2622 expand_builtin_cexpi (tree exp, rtx target)
2624 tree fndecl = get_callee_fndecl (exp);
2625 tree arg, type;
2626 machine_mode mode;
2627 rtx op0, op1, op2;
2628 location_t loc = EXPR_LOCATION (exp);
2630 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2631 return NULL_RTX;
2633 arg = CALL_EXPR_ARG (exp, 0);
2634 type = TREE_TYPE (arg);
2635 mode = TYPE_MODE (TREE_TYPE (arg));
2637 /* Try expanding via a sincos optab, fall back to emitting a libcall
2638 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2639 is only generated from sincos or cexp, or when either of them is available. */
2640 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2642 op1 = gen_reg_rtx (mode);
2643 op2 = gen_reg_rtx (mode);
2645 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2647 /* Compute into op1 and op2. */
2648 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2650 else if (targetm.libc_has_function (function_sincos, type))
2652 tree call, fn = NULL_TREE;
2653 tree top1, top2;
2654 rtx op1a, op2a;
2656 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2657 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2658 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2659 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2661 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2662 else
2663 gcc_unreachable ();
2665 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2666 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2667 op1a = copy_addr_to_reg (XEXP (op1, 0));
2668 op2a = copy_addr_to_reg (XEXP (op2, 0));
2669 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2670 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2672 /* Make sure not to fold the sincos call again. */
2673 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2674 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2675 call, 3, arg, top1, top2));
2677 else
2679 tree call, fn = NULL_TREE, narg;
2680 tree ctype = build_complex_type (type);
2682 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2683 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2684 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2685 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2686 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2687 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2688 else
2689 gcc_unreachable ();
2691 /* If we don't have a decl for cexp create one. This is the
2692 friendliest fallback if the user calls __builtin_cexpi
2693 without full target C99 function support. */
2694 if (fn == NULL_TREE)
2696 tree fntype;
2697 const char *name = NULL;
2699 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2700 name = "cexpf";
2701 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2702 name = "cexp";
2703 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2704 name = "cexpl";
2706 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2707 fn = build_fn_decl (name, fntype);
2710 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2711 build_real (type, dconst0), arg);
2713 /* Make sure not to fold the cexp call again. */
2714 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2715 return expand_expr (build_call_nary (ctype, call, 1, narg),
2716 target, VOIDmode, EXPAND_NORMAL);
2719 /* Now build the proper return type. */
2720 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2721 make_tree (TREE_TYPE (arg), op2),
2722 make_tree (TREE_TYPE (arg), op1)),
2723 target, VOIDmode, EXPAND_NORMAL);
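/* Summary of the fallbacks above: __builtin_cexpi (x) computes
   cos (x) + i * sin (x).  If there is no sincos optab, the expansion calls
   the libc sincos with two stack temporaries and assembles a COMPLEX_EXPR
   from them; failing that, it calls cexp on the complex value 0 + x*i,
   creating a plain "cexp"/"cexpf"/"cexpl" decl if the builtin one is
   missing.  */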
2726 /* Conveniently construct a function call expression. FNDECL names the
2727 function to be called, N is the number of arguments, and the "..."
2728 parameters are the argument expressions. Unlike build_call_expr,
2729 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2731 static tree
2732 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2734 va_list ap;
2735 tree fntype = TREE_TYPE (fndecl);
2736 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2738 va_start (ap, n);
2739 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2740 va_end (ap);
2741 SET_EXPR_LOCATION (fn, loc);
2742 return fn;
2745 /* Expand a call to one of the builtin rounding functions gcc defines
2746 as an extension (lfloor and lceil). As these are gcc extensions we
2747 do not need to worry about setting errno to EDOM.
2748 If expanding via optab fails, lower expression to (int)(floor(x)).
2749 EXP is the expression that is a call to the builtin function;
2750 if convenient, the result should be placed in TARGET. */
2752 static rtx
2753 expand_builtin_int_roundingfn (tree exp, rtx target)
2755 convert_optab builtin_optab;
2756 rtx op0, tmp;
2757 rtx_insn *insns;
2758 tree fndecl = get_callee_fndecl (exp);
2759 enum built_in_function fallback_fn;
2760 tree fallback_fndecl;
2761 machine_mode mode;
2762 tree arg;
2764 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2765 return NULL_RTX;
2767 arg = CALL_EXPR_ARG (exp, 0);
2769 switch (DECL_FUNCTION_CODE (fndecl))
2771 CASE_FLT_FN (BUILT_IN_ICEIL):
2772 CASE_FLT_FN (BUILT_IN_LCEIL):
2773 CASE_FLT_FN (BUILT_IN_LLCEIL):
2774 builtin_optab = lceil_optab;
2775 fallback_fn = BUILT_IN_CEIL;
2776 break;
2778 CASE_FLT_FN (BUILT_IN_IFLOOR):
2779 CASE_FLT_FN (BUILT_IN_LFLOOR):
2780 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2781 builtin_optab = lfloor_optab;
2782 fallback_fn = BUILT_IN_FLOOR;
2783 break;
2785 default:
2786 gcc_unreachable ();
2789 /* Make a suitable register to place result in. */
2790 mode = TYPE_MODE (TREE_TYPE (exp));
2792 target = gen_reg_rtx (mode);
2794 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2795 need to expand the argument again. This way, we will not perform
2796 side-effects more than once. */
2797 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2799 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2801 start_sequence ();
2803 /* Compute into TARGET. */
2804 if (expand_sfix_optab (target, op0, builtin_optab))
2806 /* Output the entire sequence. */
2807 insns = get_insns ();
2808 end_sequence ();
2809 emit_insn (insns);
2810 return target;
2813 /* If we were unable to expand via the builtin, stop the sequence
2814 (without outputting the insns). */
2815 end_sequence ();
2817 /* Fall back to floating point rounding optab. */
2818 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2820 /* For non-C99 targets we may end up without a fallback fndecl here
2821 if the user called __builtin_lfloor directly. In this case emit
2822 a call to the floor/ceil variants nevertheless. This should result
2823 in the best user experience for targets without full C99 support. */
2824 if (fallback_fndecl == NULL_TREE)
2826 tree fntype;
2827 const char *name = NULL;
2829 switch (DECL_FUNCTION_CODE (fndecl))
2831 case BUILT_IN_ICEIL:
2832 case BUILT_IN_LCEIL:
2833 case BUILT_IN_LLCEIL:
2834 name = "ceil";
2835 break;
2836 case BUILT_IN_ICEILF:
2837 case BUILT_IN_LCEILF:
2838 case BUILT_IN_LLCEILF:
2839 name = "ceilf";
2840 break;
2841 case BUILT_IN_ICEILL:
2842 case BUILT_IN_LCEILL:
2843 case BUILT_IN_LLCEILL:
2844 name = "ceill";
2845 break;
2846 case BUILT_IN_IFLOOR:
2847 case BUILT_IN_LFLOOR:
2848 case BUILT_IN_LLFLOOR:
2849 name = "floor";
2850 break;
2851 case BUILT_IN_IFLOORF:
2852 case BUILT_IN_LFLOORF:
2853 case BUILT_IN_LLFLOORF:
2854 name = "floorf";
2855 break;
2856 case BUILT_IN_IFLOORL:
2857 case BUILT_IN_LFLOORL:
2858 case BUILT_IN_LLFLOORL:
2859 name = "floorl";
2860 break;
2861 default:
2862 gcc_unreachable ();
2865 fntype = build_function_type_list (TREE_TYPE (arg),
2866 TREE_TYPE (arg), NULL_TREE);
2867 fallback_fndecl = build_fn_decl (name, fntype);
2870 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2872 tmp = expand_normal (exp);
2873 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2875 /* Truncate the result of floating point optab to integer
2876 via expand_fix (). */
2877 target = gen_reg_rtx (mode);
2878 expand_fix (target, tmp, 0);
2880 return target;
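/* For example, on a target with no lfloor optab handler,
   __builtin_lfloor (x) ends up expanded as the equivalent of

     (long) floor (x)

   where the floor call may be a library call to a locally built "floor"
   decl (see above) and the final truncation is performed by expand_fix.  */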
2883 /* Expand a call to one of the builtin math functions doing integer
2884 conversion (lrint).
2885 Return 0 if a normal call should be emitted rather than expanding the
2886 function in-line. EXP is the expression that is a call to the builtin
2887 function; if convenient, the result should be placed in TARGET. */
2889 static rtx
2890 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2892 convert_optab builtin_optab;
2893 rtx op0;
2894 rtx_insn *insns;
2895 tree fndecl = get_callee_fndecl (exp);
2896 tree arg;
2897 machine_mode mode;
2898 enum built_in_function fallback_fn = BUILT_IN_NONE;
2900 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2901 return NULL_RTX;
2903 arg = CALL_EXPR_ARG (exp, 0);
2905 switch (DECL_FUNCTION_CODE (fndecl))
2907 CASE_FLT_FN (BUILT_IN_IRINT):
2908 fallback_fn = BUILT_IN_LRINT;
2909 gcc_fallthrough ();
2910 CASE_FLT_FN (BUILT_IN_LRINT):
2911 CASE_FLT_FN (BUILT_IN_LLRINT):
2912 builtin_optab = lrint_optab;
2913 break;
2915 CASE_FLT_FN (BUILT_IN_IROUND):
2916 fallback_fn = BUILT_IN_LROUND;
2917 gcc_fallthrough ();
2918 CASE_FLT_FN (BUILT_IN_LROUND):
2919 CASE_FLT_FN (BUILT_IN_LLROUND):
2920 builtin_optab = lround_optab;
2921 break;
2923 default:
2924 gcc_unreachable ();
2927 /* There's no easy way to detect the case we need to set EDOM. */
2928 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2929 return NULL_RTX;
2931 /* Make a suitable register to place result in. */
2932 mode = TYPE_MODE (TREE_TYPE (exp));
2934 /* There's no easy way to detect the case we need to set EDOM. */
2935 if (!flag_errno_math)
2937 rtx result = gen_reg_rtx (mode);
2939 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2940 need to expand the argument again. This way, we will not perform
2941 side-effects more than once. */
2942 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2944 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2946 start_sequence ();
2948 if (expand_sfix_optab (result, op0, builtin_optab))
2950 /* Output the entire sequence. */
2951 insns = get_insns ();
2952 end_sequence ();
2953 emit_insn (insns);
2954 return result;
2957 /* If we were unable to expand via the builtin, stop the sequence
2958 (without outputting the insns) and call to the library function
2959 with the stabilized argument list. */
2960 end_sequence ();
2963 if (fallback_fn != BUILT_IN_NONE)
2965 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2966 targets, (int) round (x) should never be transformed into
2967 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2968 a call to lround in the hope that the target provides at least some
2969 C99 functions. This should result in the best user experience for
2970 targets without full C99 support. */
2971 tree fallback_fndecl = mathfn_built_in_1
2972 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2974 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2975 fallback_fndecl, 1, arg);
2977 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2978 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2979 return convert_to_mode (mode, target, 0);
2982 return expand_call (exp, target, target == const0_rtx);
2985 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2986 a normal call should be emitted rather than expanding the function
2987 in-line. EXP is the expression that is a call to the builtin
2988 function; if convenient, the result should be placed in TARGET. */
2990 static rtx
2991 expand_builtin_powi (tree exp, rtx target)
2993 tree arg0, arg1;
2994 rtx op0, op1;
2995 machine_mode mode;
2996 machine_mode mode2;
2998 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3001 arg0 = CALL_EXPR_ARG (exp, 0);
3002 arg1 = CALL_EXPR_ARG (exp, 1);
3003 mode = TYPE_MODE (TREE_TYPE (exp));
3005 /* Emit a libcall to libgcc. */
3007 /* Mode of the 2nd argument must match that of an int. */
3008 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3010 if (target == NULL_RTX)
3011 target = gen_reg_rtx (mode);
3013 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3014 if (GET_MODE (op0) != mode)
3015 op0 = convert_to_mode (mode, op0, 0);
3016 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3017 if (GET_MODE (op1) != mode2)
3018 op1 = convert_to_mode (mode2, op1, 0);
3020 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3021 target, LCT_CONST, mode,
3022 op0, mode, op1, mode2);
3024 return target;
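/* Note that powi is always expanded as a libcall here: e.g. for DFmode,
   __builtin_powi (x, n) typically becomes a call to libgcc's __powidf2
   (the exact name comes from optab_libfunc (powi_optab, mode)), with the
   exponent first converted to the mode of int.  */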
3027 /* Expand expression EXP which is a call to the strlen builtin. Return
3028 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3029 try to get the result in TARGET, if convenient. */
3031 static rtx
3032 expand_builtin_strlen (tree exp, rtx target,
3033 machine_mode target_mode)
3035 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3036 return NULL_RTX;
3038 tree src = CALL_EXPR_ARG (exp, 0);
3040 /* If the length can be computed at compile-time, return it. */
3041 if (tree len = c_strlen (src, 0))
3042 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3044 /* If the length can be computed at compile-time and is a constant
3045 integer, but there are side-effects in src, evaluate
3046 src for side-effects, then return len.
3047 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3048 can be optimized into: i++; x = 3; */
3049 tree len = c_strlen (src, 1);
3050 if (len && TREE_CODE (len) == INTEGER_CST)
3052 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3053 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3056 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3058 /* If SRC is not a pointer type, don't do this operation inline. */
3059 if (align == 0)
3060 return NULL_RTX;
3062 /* Bail out if we can't compute strlen in the right mode. */
3063 machine_mode insn_mode;
3064 enum insn_code icode = CODE_FOR_nothing;
3065 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3067 icode = optab_handler (strlen_optab, insn_mode);
3068 if (icode != CODE_FOR_nothing)
3069 break;
3071 if (insn_mode == VOIDmode)
3072 return NULL_RTX;
3074 /* Make a place to hold the source address. We will not expand
3075 the actual source until we are sure that the expansion will
3076 not fail -- there are trees that cannot be expanded twice. */
3077 rtx src_reg = gen_reg_rtx (Pmode);
3079 /* Mark the beginning of the strlen sequence so we can emit the
3080 source operand later. */
3081 rtx_insn *before_strlen = get_last_insn ();
3083 class expand_operand ops[4];
3084 create_output_operand (&ops[0], target, insn_mode);
3085 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3086 create_integer_operand (&ops[2], 0);
3087 create_integer_operand (&ops[3], align);
3088 if (!maybe_expand_insn (icode, 4, ops))
3089 return NULL_RTX;
3091 /* Check to see if the argument was declared attribute nonstring
3092 and if so, issue a warning since at this point it's not known
3093 to be nul-terminated. */
3094 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3096 /* Now that we are assured of success, expand the source. */
3097 start_sequence ();
3098 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3099 if (pat != src_reg)
3101 #ifdef POINTERS_EXTEND_UNSIGNED
3102 if (GET_MODE (pat) != Pmode)
3103 pat = convert_to_mode (Pmode, pat,
3104 POINTERS_EXTEND_UNSIGNED);
3105 #endif
3106 emit_move_insn (src_reg, pat);
3108 pat = get_insns ();
3109 end_sequence ();
3111 if (before_strlen)
3112 emit_insn_after (pat, before_strlen);
3113 else
3114 emit_insn_before (pat, get_insns ());
3116 /* Return the value in the proper mode for this function. */
3117 if (GET_MODE (ops[0].value) == target_mode)
3118 target = ops[0].value;
3119 else if (target != 0)
3120 convert_move (target, ops[0].value, 0);
3121 else
3122 target = convert_to_mode (target_mode, ops[0].value, 0);
3124 return target;
3127 /* Expand call EXP to the strnlen built-in, returning the result
3128 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3130 static rtx
3131 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3133 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3134 return NULL_RTX;
3136 tree src = CALL_EXPR_ARG (exp, 0);
3137 tree bound = CALL_EXPR_ARG (exp, 1);
3139 if (!bound)
3140 return NULL_RTX;
3142 location_t loc = UNKNOWN_LOCATION;
3143 if (EXPR_HAS_LOCATION (exp))
3144 loc = EXPR_LOCATION (exp);
3146 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3147 so these conversions aren't necessary. */
3148 c_strlen_data lendata = { };
3149 tree len = c_strlen (src, 0, &lendata, 1);
3150 if (len)
3151 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3153 if (TREE_CODE (bound) == INTEGER_CST)
3155 if (!len)
3156 return NULL_RTX;
3158 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3159 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3162 if (TREE_CODE (bound) != SSA_NAME)
3163 return NULL_RTX;
3165 wide_int min, max;
3166 value_range r;
3167 get_global_range_query ()->range_of_expr (r, bound);
3168 if (r.kind () != VR_RANGE)
3169 return NULL_RTX;
3170 min = r.lower_bound ();
3171 max = r.upper_bound ();
3173 if (!len || TREE_CODE (len) != INTEGER_CST)
3175 bool exact;
3176 lendata.decl = unterminated_array (src, &len, &exact);
3177 if (!lendata.decl)
3178 return NULL_RTX;
3181 if (lendata.decl)
3182 return NULL_RTX;
3184 if (wi::gtu_p (min, wi::to_wide (len)))
3185 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3187 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3188 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3191 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3192 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3193 a target constant. */
3195 static rtx
3196 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3197 fixed_size_mode mode)
3199 /* The REPresentation pointed to by DATA need not be a nul-terminated
3200 string but the caller guarantees it's large enough for MODE. */
3201 const char *rep = (const char *) data;
3203 /* The by-pieces infrastructure does not try to pick a vector mode
3204 for memcpy expansion. */
3205 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3206 /*nul_terminated=*/false);
3209 /* LEN specifies the length of the block for the memcpy/memset operation.
3210 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3211 In some cases we can make a very likely guess at the maximum size, which
3212 we then put into PROBABLE_MAX_SIZE. */
3214 static void
3215 determine_block_size (tree len, rtx len_rtx,
3216 unsigned HOST_WIDE_INT *min_size,
3217 unsigned HOST_WIDE_INT *max_size,
3218 unsigned HOST_WIDE_INT *probable_max_size)
3220 if (CONST_INT_P (len_rtx))
3222 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3223 return;
3225 else
3227 wide_int min, max;
3228 enum value_range_kind range_type = VR_UNDEFINED;
3230 /* Determine bounds from the type. */
3231 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3232 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3233 else
3234 *min_size = 0;
3235 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3236 *probable_max_size = *max_size
3237 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3238 else
3239 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3241 if (TREE_CODE (len) == SSA_NAME)
3243 value_range r;
3244 get_global_range_query ()->range_of_expr (r, len);
3245 range_type = r.kind ();
3246 if (range_type != VR_UNDEFINED)
3248 min = wi::to_wide (r.min ());
3249 max = wi::to_wide (r.max ());
3252 if (range_type == VR_RANGE)
3254 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3255 *min_size = min.to_uhwi ();
3256 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3257 *probable_max_size = *max_size = max.to_uhwi ();
3259 else if (range_type == VR_ANTI_RANGE)
3261 /* Code like
3263 int n;
3264 if (n < 100)
3265 memcpy (a, b, n)
3267 produces an anti-range allowing negative values of N. We still
3268 can use the information and make a guess that N is not negative. */
3270 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3271 *probable_max_size = min.to_uhwi () - 1;
3274 gcc_checking_assert (*max_size <=
3275 (unsigned HOST_WIDE_INT)
3276 GET_MODE_MASK (GET_MODE (len_rtx)));
3279 /* Expand a call EXP to the memcpy builtin.
3280 Return NULL_RTX if we failed; the caller should emit a normal call,
3281 otherwise try to get the result in TARGET, if convenient (and in
3282 mode MODE if that's convenient). */
3284 static rtx
3285 expand_builtin_memcpy (tree exp, rtx target)
3287 if (!validate_arglist (exp,
3288 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3289 return NULL_RTX;
3291 tree dest = CALL_EXPR_ARG (exp, 0);
3292 tree src = CALL_EXPR_ARG (exp, 1);
3293 tree len = CALL_EXPR_ARG (exp, 2);
3295 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3296 /*retmode=*/ RETURN_BEGIN, false);
3299 /* Check a call EXP to the memmove built-in for validity.
3300 Return NULL_RTX on both success and failure. */
3302 static rtx
3303 expand_builtin_memmove (tree exp, rtx target)
3305 if (!validate_arglist (exp,
3306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3307 return NULL_RTX;
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3313 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3314 /*retmode=*/ RETURN_BEGIN, true);
3317 /* Expand a call EXP to the mempcpy builtin.
3318 Return NULL_RTX if we failed; the caller should emit a normal call,
3319 otherwise try to get the result in TARGET, if convenient (and in
3320 mode MODE if that's convenient). */
3322 static rtx
3323 expand_builtin_mempcpy (tree exp, rtx target)
3325 if (!validate_arglist (exp,
3326 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3327 return NULL_RTX;
3329 tree dest = CALL_EXPR_ARG (exp, 0);
3330 tree src = CALL_EXPR_ARG (exp, 1);
3331 tree len = CALL_EXPR_ARG (exp, 2);
3333 /* Policy does not generally allow using compute_objsize (which
3334 is used internally by check_memop_size) to change code generation
3335 or drive optimization decisions.
3337 In this instance it is safe because the code we generate has
3338 the same semantics regardless of the return value of
3339 check_memop_sizes. Exactly the same amount of data is copied
3340 and the return value is exactly the same in both cases.
3342 Furthermore, check_memop_size always uses mode 0 for the call to
3343 compute_objsize, so the imprecise nature of compute_objsize is
3344 avoided. */
3346 /* Avoid expanding mempcpy into memcpy when the call is determined
3347 to overflow the buffer. This also prevents the same overflow
3348 from being diagnosed again when expanding memcpy. */
3350 return expand_builtin_mempcpy_args (dest, src, len,
3351 target, exp, /*retmode=*/ RETURN_END);
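/* For reference: memcpy returns DEST, which is why expand_builtin_memcpy
   uses RETURN_BEGIN, while mempcpy (d, s, n) returns d + n, hence
   RETURN_END here; RETURN_END_MINUS_ONE is the stpcpy flavour, whose
   result points at the last byte written.  */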
3354 /* Helper function to do the actual work for expand of memory copy family
3355 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3356 of memory from SRC to DEST and assign to TARGET if convenient. Return
3357 value is based on RETMODE argument. */
3359 static rtx
3360 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3361 rtx target, tree exp, memop_ret retmode,
3362 bool might_overlap)
3364 unsigned int src_align = get_pointer_alignment (src);
3365 unsigned int dest_align = get_pointer_alignment (dest);
3366 rtx dest_mem, src_mem, dest_addr, len_rtx;
3367 HOST_WIDE_INT expected_size = -1;
3368 unsigned int expected_align = 0;
3369 unsigned HOST_WIDE_INT min_size;
3370 unsigned HOST_WIDE_INT max_size;
3371 unsigned HOST_WIDE_INT probable_max_size;
3373 bool is_move_done;
3375 /* If DEST is not a pointer type, call the normal function. */
3376 if (dest_align == 0)
3377 return NULL_RTX;
3379 /* If SRC is not a pointer type, don't do this
3380 operation in-line. */
3381 if (src_align == 0)
3382 return NULL_RTX;
3384 if (currently_expanding_gimple_stmt)
3385 stringop_block_profile (currently_expanding_gimple_stmt,
3386 &expected_align, &expected_size);
3388 if (expected_align < dest_align)
3389 expected_align = dest_align;
3390 dest_mem = get_memory_rtx (dest, len);
3391 set_mem_align (dest_mem, dest_align);
3392 len_rtx = expand_normal (len);
3393 determine_block_size (len, len_rtx, &min_size, &max_size,
3394 &probable_max_size);
3396 /* Try to get the byte representation of the constant SRC points to,
3397 with its byte size in NBYTES. */
3398 unsigned HOST_WIDE_INT nbytes;
3399 const char *rep = getbyterep (src, &nbytes);
3401 /* If the function's constant bound LEN_RTX is less than or equal
3402 to the byte size of the representation of the constant argument,
3403 and if block move would be done by pieces, we can avoid loading
3404 the bytes from memory and only store the computed constant.
3405 This works in the overlap (memmove) case as well because
3406 store_by_pieces just generates a series of stores of constants
3407 from the representation returned by getbyterep(). */
3408 if (rep
3409 && CONST_INT_P (len_rtx)
3410 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3411 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3412 CONST_CAST (char *, rep),
3413 dest_align, false))
3415 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3416 builtin_memcpy_read_str,
3417 CONST_CAST (char *, rep),
3418 dest_align, false, retmode);
3419 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3420 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3421 return dest_mem;
3424 src_mem = get_memory_rtx (src, len);
3425 set_mem_align (src_mem, src_align);
3427 /* Copy word part most expediently. */
3428 enum block_op_methods method = BLOCK_OP_NORMAL;
3429 if (CALL_EXPR_TAILCALL (exp)
3430 && (retmode == RETURN_BEGIN || target == const0_rtx))
3431 method = BLOCK_OP_TAILCALL;
3432 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3433 && retmode == RETURN_END
3434 && !might_overlap
3435 && target != const0_rtx);
3436 if (use_mempcpy_call)
3437 method = BLOCK_OP_NO_LIBCALL_RET;
3438 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3439 expected_align, expected_size,
3440 min_size, max_size, probable_max_size,
3441 use_mempcpy_call, &is_move_done,
3442 might_overlap);
3444 /* Bail out when a mempcpy call would be expanded as a libcall and when
3445 we have a target that provides a fast implementation
3446 of the mempcpy routine. */
3447 if (!is_move_done)
3448 return NULL_RTX;
3450 if (dest_addr == pc_rtx)
3451 return NULL_RTX;
3453 if (dest_addr == 0)
3455 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3456 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3459 if (retmode != RETURN_BEGIN && target != const0_rtx)
3461 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3462 /* stpcpy returns a pointer to the last byte. */
3463 if (retmode == RETURN_END_MINUS_ONE)
3464 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3467 return dest_addr;
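/* A caller-level sketch of the three RETMODE variants handled above.
   This is an illustrative example, not code from this file; the local
   names are assumptions:

     char buf[16];
     void *a = __builtin_memcpy (buf, "abcd", 5);   // RETURN_BEGIN: a == buf
     void *b = __builtin_mempcpy (buf, "abcd", 5);  // RETURN_END:   b == buf + 5
     char *c = __builtin_stpcpy (buf, "abcd");      // RETURN_END_MINUS_ONE:
                                                    //   c == buf + 4 (the NUL)

   RETURN_END_MINUS_ONE is RETURN_END stepped back by one byte, which is
   why the stpcpy expansion below reuses the mempcpy machinery with
   strlen (src) + 1 as the length.  */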
3470 static rtx
3471 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3472 rtx target, tree orig_exp, memop_ret retmode)
3474 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3475 retmode, false);
3478 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3479 we failed; the caller should emit a normal call, otherwise try to
3480 get the result in TARGET, if convenient.
3481 Return value is based on RETMODE argument. */
3483 static rtx
3484 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3486 class expand_operand ops[3];
3487 rtx dest_mem;
3488 rtx src_mem;
3490 if (!targetm.have_movstr ())
3491 return NULL_RTX;
3493 dest_mem = get_memory_rtx (dest, NULL);
3494 src_mem = get_memory_rtx (src, NULL);
3495 if (retmode == RETURN_BEGIN)
3497 target = force_reg (Pmode, XEXP (dest_mem, 0));
3498 dest_mem = replace_equiv_address (dest_mem, target);
3501 create_output_operand (&ops[0],
3502 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3503 create_fixed_operand (&ops[1], dest_mem);
3504 create_fixed_operand (&ops[2], src_mem);
3505 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3506 return NULL_RTX;
3508 if (retmode != RETURN_BEGIN && target != const0_rtx)
3510 target = ops[0].value;
3511 /* movstr is supposed to set end to the address of the NUL
3512 terminator. If the caller requested a mempcpy-like return value,
3513 adjust it. */
3514 if (retmode == RETURN_END)
3516 rtx tem = plus_constant (GET_MODE (target),
3517 gen_lowpart (GET_MODE (target), target), 1);
3518 emit_move_insn (target, force_operand (tem, NULL_RTX));
3521 return target;
3524 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3525 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3526 try to get the result in TARGET, if convenient (and in mode MODE if that's
3527 convenient). */
3529 static rtx
3530 expand_builtin_strcpy (tree exp, rtx target)
3532 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3533 return NULL_RTX;
3535 tree dest = CALL_EXPR_ARG (exp, 0);
3536 tree src = CALL_EXPR_ARG (exp, 1);
3538 return expand_builtin_strcpy_args (exp, dest, src, target);
3541 /* Helper function to do the actual work for expand_builtin_strcpy. The
3542 arguments to the builtin_strcpy call DEST and SRC are broken out
3543 so that this can also be called without constructing an actual CALL_EXPR.
3544 The other arguments and return value are the same as for
3545 expand_builtin_strcpy. */
3547 static rtx
3548 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3550 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3553 /* Expand a call EXP to the stpcpy builtin.
3554 Return NULL_RTX if we failed; the caller should emit a normal call,
3555 otherwise try to get the result in TARGET, if convenient (and in
3556 mode MODE if that's convenient). */
3558 static rtx
3559 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3561 tree dst, src;
3562 location_t loc = EXPR_LOCATION (exp);
3564 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3565 return NULL_RTX;
3567 dst = CALL_EXPR_ARG (exp, 0);
3568 src = CALL_EXPR_ARG (exp, 1);
3570 /* If return value is ignored, transform stpcpy into strcpy. */
3571 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3573 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3574 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3575 return expand_expr (result, target, mode, EXPAND_NORMAL);
3577 else
3579 tree len, lenp1;
3580 rtx ret;
3582 /* Ensure we get an actual string whose length can be evaluated at
3583 compile-time, not an expression containing a string. This is
3584 because the latter will potentially produce pessimized code
3585 when used to produce the return value. */
3586 c_strlen_data lendata = { };
3587 if (!c_getstr (src)
3588 || !(len = c_strlen (src, 0, &lendata, 1)))
3589 return expand_movstr (dst, src, target,
3590 /*retmode=*/ RETURN_END_MINUS_ONE);
3592 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3593 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3594 target, exp,
3595 /*retmode=*/ RETURN_END_MINUS_ONE);
3597 if (ret)
3598 return ret;
3600 if (TREE_CODE (len) == INTEGER_CST)
3602 rtx len_rtx = expand_normal (len);
3604 if (CONST_INT_P (len_rtx))
3606 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3608 if (ret)
3610 if (! target)
3612 if (mode != VOIDmode)
3613 target = gen_reg_rtx (mode);
3614 else
3615 target = gen_reg_rtx (GET_MODE (ret));
3617 if (GET_MODE (target) != GET_MODE (ret))
3618 ret = gen_lowpart (GET_MODE (target), ret);
3620 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3621 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3622 gcc_assert (ret);
3624 return target;
3629 return expand_movstr (dst, src, target,
3630 /*retmode=*/ RETURN_END_MINUS_ONE);
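/* An illustrative sketch of the two stpcpy transformations above
   (assumed user-level calls, not code from this file):

     __builtin_stpcpy (d, s);              // result unused: emitted as
                                           // strcpy (d, s) when an implicit
                                           // strcpy declaration is available

     char *p = __builtin_stpcpy (d, "hi"); // constant source: emitted via
                                           // mempcpy (d, "hi", 3) - 1,
                                           // so p == d + 2

   When neither case applies, the expansion falls back to the movstr
   pattern or, failing that, to a normal library call.  */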
3634 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3635 arguments while being careful to avoid duplicate warnings (which could
3636 be issued if the expander were to expand the call, resulting in it
3637 being emitted in expand_call()). */
3639 static rtx
3640 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3642 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3644 /* The call has been successfully expanded. Check for nonstring
3645 arguments and issue warnings as appropriate. */
3646 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3647 return ret;
3650 return NULL_RTX;
3653 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3654 bytes from constant string DATA + OFFSET and return it as target
3655 constant. */
3657 static rtx
3658 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3659 fixed_size_mode mode)
3661 const char *str = (const char *) data;
3663 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3664 return const0_rtx;
3666 /* The by-pieces infrastructure does not try to pick a vector mode
3667 for strncpy expansion. */
3668 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3671 /* Helper to check the sizes of sequences and the destination of calls
3672 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3673 success (no overflow or invalid sizes), false otherwise. */
3675 static bool
3676 check_strncat_sizes (tree exp, tree objsize)
3678 tree dest = CALL_EXPR_ARG (exp, 0);
3679 tree src = CALL_EXPR_ARG (exp, 1);
3680 tree maxread = CALL_EXPR_ARG (exp, 2);
3682 /* Try to determine the range of lengths that the source expression
3683 refers to. */
3684 c_strlen_data lendata = { };
3685 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3687 /* Try to verify that the destination is big enough for the shortest
3688 string. */
3690 access_data data (nullptr, exp, access_read_write, maxread, true);
3691 if (!objsize && warn_stringop_overflow)
3693 /* If it hasn't been provided by __strncat_chk, try to determine
3694 the size of the destination object into which the source is
3695 being copied. */
3696 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3699 /* Add one for the terminating nul. */
3700 tree srclen = (lendata.minlen
3701 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3702 size_one_node)
3703 : NULL_TREE);
3705 /* The strncat function copies at most MAXREAD bytes and always appends
3706 the terminating nul so the specified upper bound should never be equal
3707 to (or greater than) the size of the destination. */
3708 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3709 && tree_int_cst_equal (objsize, maxread))
3711 location_t loc = EXPR_LOCATION (exp);
3712 warning_at (loc, OPT_Wstringop_overflow_,
3713 "%qD specified bound %E equals destination size",
3714 get_callee_fndecl (exp), maxread);
3716 return false;
3719 if (!srclen
3720 || (maxread && tree_fits_uhwi_p (maxread)
3721 && tree_fits_uhwi_p (srclen)
3722 && tree_int_cst_lt (maxread, srclen)))
3723 srclen = maxread;
3725 /* The number of bytes to write is LEN but check_access will also
3726 check SRCLEN if LEN's value isn't known. */
3727 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3728 objsize, data.mode, &data);
3731 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3732 NULL_RTX if we failed; the caller should emit a normal call. */
3734 static rtx
3735 expand_builtin_strncpy (tree exp, rtx target)
3737 location_t loc = EXPR_LOCATION (exp);
3739 if (!validate_arglist (exp,
3740 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3741 return NULL_RTX;
3742 tree dest = CALL_EXPR_ARG (exp, 0);
3743 tree src = CALL_EXPR_ARG (exp, 1);
3744 /* The number of bytes to write (not the maximum). */
3745 tree len = CALL_EXPR_ARG (exp, 2);
3747 /* The length of the source sequence. */
3748 tree slen = c_strlen (src, 1);
3750 /* We must be passed a constant len and src parameter. */
3751 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3752 return NULL_RTX;
3754 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3756 /* We're required to pad with trailing zeros if the requested
3757 len is greater than strlen(s2)+1. In that case try to
3758 use store_by_pieces, if it fails, punt. */
3759 if (tree_int_cst_lt (slen, len))
3761 unsigned int dest_align = get_pointer_alignment (dest);
3762 const char *p = c_getstr (src);
3763 rtx dest_mem;
3765 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3766 || !can_store_by_pieces (tree_to_uhwi (len),
3767 builtin_strncpy_read_str,
3768 CONST_CAST (char *, p),
3769 dest_align, false))
3770 return NULL_RTX;
3772 dest_mem = get_memory_rtx (dest, len);
3773 store_by_pieces (dest_mem, tree_to_uhwi (len),
3774 builtin_strncpy_read_str,
3775 CONST_CAST (char *, p), dest_align, false,
3776 RETURN_BEGIN);
3777 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3778 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3779 return dest_mem;
3782 return NULL_RTX;
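/* An assumed example of the padding case handled above (not code from
   this file):

     char buf[8];
     __builtin_strncpy (buf, "ab", 8);

   Because the bound 8 exceeds strlen ("ab") + 1 == 3, strncpy must
   write "ab" followed by six NUL bytes.  When the source is a known
   constant string, store_by_pieces can emit that as a few constant
   stores; otherwise the expansion above punts and a normal call is
   emitted.  */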
3785 /* Return the RTL of a register in MODE generated from PREV in the
3786 previous iteration. */
3788 static rtx
3789 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3791 rtx target = nullptr;
3792 if (prev != nullptr && prev->data != nullptr)
3794 /* Use the previous data in the same mode. */
3795 if (prev->mode == mode)
3796 return prev->data;
3798 fixed_size_mode prev_mode = prev->mode;
3800 /* Don't use the previous data to write QImode if it is in a
3801 vector mode. */
3802 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3803 return target;
3805 rtx prev_rtx = prev->data;
3807 if (REG_P (prev_rtx)
3808 && HARD_REGISTER_P (prev_rtx)
3809 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3811 /* This case occurs when PREV_MODE is a vector and when
3812 MODE is too small to store using vector operations.
3813 After register allocation, the code will need to move the
3814 lowpart of the vector register into a non-vector register.
3816 Also, the target has chosen to use a hard register
3817 instead of going with the default choice of using a
3818 pseudo register. We should respect that choice and try to
3819 avoid creating a pseudo register with the same mode as the
3820 current hard register.
3822 In principle, we could just use a lowpart MODE subreg of
3823 the vector register. However, the vector register mode might
3824 be too wide for non-vector registers, and we already know
3825 that the non-vector mode is too small for vector registers.
3826 It's therefore likely that we'd need to spill to memory in
3827 the vector mode and reload the non-vector value from there.
3829 Try to avoid that by reducing the vector register to the
3830 smallest size that it can hold. This should increase the
3831 chances that non-vector registers can hold both the inner
3832 and outer modes of the subreg that we generate later. */
3833 machine_mode m;
3834 fixed_size_mode candidate;
3835 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3836 if (is_a<fixed_size_mode> (m, &candidate))
3838 if (GET_MODE_SIZE (candidate)
3839 >= GET_MODE_SIZE (prev_mode))
3840 break;
3841 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3842 && lowpart_subreg_regno (REGNO (prev_rtx),
3843 prev_mode, candidate) >= 0)
3845 target = lowpart_subreg (candidate, prev_rtx,
3846 prev_mode);
3847 prev_rtx = target;
3848 prev_mode = candidate;
3849 break;
3852 if (target == nullptr)
3853 prev_rtx = copy_to_reg (prev_rtx);
3856 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3858 return target;
3861 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3862 bytes from constant string DATA + OFFSET and return it as target
3863 constant. If PREV isn't nullptr, it has the RTL info from the
3864 previous iteration. */
3866 static rtx
3867 builtin_memset_read_str (void *data, void *prev,
3868 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3869 fixed_size_mode mode)
3871 const char *c = (const char *) data;
3872 unsigned int size = GET_MODE_SIZE (mode);
3874 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3875 mode);
3876 if (target != nullptr)
3877 return target;
3878 rtx src = gen_int_mode (*c, QImode);
3880 if (VECTOR_MODE_P (mode))
3882 gcc_assert (GET_MODE_INNER (mode) == QImode);
3884 rtx const_vec = gen_const_vec_duplicate (mode, src);
3885 if (prev == NULL)
3886 /* Return CONST_VECTOR when called by a query function. */
3887 return const_vec;
3889 /* Use the move expander with CONST_VECTOR. */
3890 target = targetm.gen_memset_scratch_rtx (mode);
3891 emit_move_insn (target, const_vec);
3892 return target;
3895 char *p = XALLOCAVEC (char, size);
3897 memset (p, *c, size);
3899 /* Vector modes should be handled above. */
3900 return c_readstr (p, as_a <scalar_int_mode> (mode));
3903 /* Callback routine for store_by_pieces. Return the RTL of a register
3904 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3905 char value given in the RTL register data. For example, if mode is
3906 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3907 nullptr, it has the RTL info from the previous iteration. */
3909 static rtx
3910 builtin_memset_gen_str (void *data, void *prev,
3911 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3912 fixed_size_mode mode)
3914 rtx target, coeff;
3915 size_t size;
3916 char *p;
3918 size = GET_MODE_SIZE (mode);
3919 if (size == 1)
3920 return (rtx) data;
3922 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3923 if (target != nullptr)
3924 return target;
3926 if (VECTOR_MODE_P (mode))
3928 gcc_assert (GET_MODE_INNER (mode) == QImode);
3930 /* vec_duplicate_optab is a precondition to pick a vector mode for
3931 the memset expander. */
3932 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3934 target = targetm.gen_memset_scratch_rtx (mode);
3935 class expand_operand ops[2];
3936 create_output_operand (&ops[0], target, mode);
3937 create_input_operand (&ops[1], (rtx) data, QImode);
3938 expand_insn (icode, 2, ops);
3939 if (!rtx_equal_p (target, ops[0].value))
3940 emit_move_insn (target, ops[0].value);
3942 return target;
3945 p = XALLOCAVEC (char, size);
3946 memset (p, 1, size);
3947 /* Vector modes should be handled above. */
3948 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3950 target = convert_to_mode (mode, (rtx) data, 1);
3951 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3952 return force_reg (mode, target);
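/* A worked example of the non-vector broadcast above (illustrative
   arithmetic only): for a 4-byte scalar MODE the coefficient read from
   the 0x01-filled buffer is 0x01010101, so a variable byte value V is
   replicated by the multiplication

     0x01010101 * (V & 0xff)

   e.g. V == 0x5a yields 0x5a5a5a5a.  Targets with vec_duplicate take
   the vector path handled earlier in this function instead.  */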
3955 /* Expand expression EXP, which is a call to the memset builtin. Return
3956 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3957 try to get the result in TARGET, if convenient (and in mode MODE if that's
3958 convenient). */
3960 static rtx
3961 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3963 if (!validate_arglist (exp,
3964 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3965 return NULL_RTX;
3967 tree dest = CALL_EXPR_ARG (exp, 0);
3968 tree val = CALL_EXPR_ARG (exp, 1);
3969 tree len = CALL_EXPR_ARG (exp, 2);
3971 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3974 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3975 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3976 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3977 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3979 The strategy is to issue one store_by_pieces for each power of two,
3980 from most to least significant, guarded by a test on whether there
3981 are at least that many bytes left to copy in LEN.
3983 ??? Should we skip some powers of two in favor of loops? Maybe start
3984 at the max of TO/LEN/word alignment, at least when optimizing for
3985 size, instead of ensuring O(log len) dynamic compares? */
3987 bool
3988 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3989 unsigned HOST_WIDE_INT min_len,
3990 unsigned HOST_WIDE_INT max_len,
3991 rtx val, char valc, unsigned int align)
3993 int max_bits = floor_log2 (max_len);
3994 int min_bits = floor_log2 (min_len);
3995 int sctz_len = ctz_len;
3997 gcc_checking_assert (sctz_len >= 0);
3999 if (val)
4000 valc = 1;
4002 /* Bits more significant than TST_BITS are part of the shared prefix
4003 in the binary representation of both min_len and max_len. Since
4004 they're identical, we don't need to test them in the loop. */
4005 int tst_bits = (max_bits != min_bits ? max_bits
4006 : floor_log2 (max_len ^ min_len));
4008 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4009 bytes, to lower max_bits. In the unlikely case of a constant LEN
4010 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4011 single store_by_pieces, but otherwise, select the minimum multiple
4012 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
4013 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4014 unsigned HOST_WIDE_INT blksize;
4015 if (max_len > min_len)
4017 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4018 align / BITS_PER_UNIT);
4019 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4020 blksize &= ~(alrng - 1);
4022 else if (max_len == min_len)
4023 blksize = max_len;
4024 else
4025 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4026 return false;
4027 if (min_len >= blksize)
4029 min_len -= blksize;
4030 min_bits = floor_log2 (min_len);
4031 max_len -= blksize;
4032 max_bits = floor_log2 (max_len);
4034 tst_bits = (max_bits != min_bits ? max_bits
4035 : floor_log2 (max_len ^ min_len));
4037 else
4038 blksize = 0;
4040 /* Check that we can use store by pieces for the maximum store count
4041 we may issue (initial fixed-size block, plus conditional
4042 power-of-two-sized stores from max_bits to ctz_len). */
4043 unsigned HOST_WIDE_INT xlenest = blksize;
4044 if (max_bits >= 0)
4045 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4046 - (HOST_WIDE_INT_1U << ctz_len));
4047 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4048 &valc, align, true))
4049 return false;
4051 by_pieces_constfn constfun;
4052 void *constfundata;
4053 if (val)
4055 constfun = builtin_memset_gen_str;
4056 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4057 val);
4059 else
4061 constfun = builtin_memset_read_str;
4062 constfundata = &valc;
4065 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4066 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4067 to = replace_equiv_address (to, ptr);
4068 set_mem_align (to, align);
4070 if (blksize)
4072 to = store_by_pieces (to, blksize,
4073 constfun, constfundata,
4074 align, true,
4075 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4076 if (max_len == 0)
4077 return true;
4079 /* Adjust PTR, TO and REM. Since TO's address is likely
4080 PTR+offset, we have to replace it. */
4081 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4082 to = replace_equiv_address (to, ptr);
4083 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4084 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4087 /* Iterate over power-of-two block sizes from the maximum length to
4088 the least significant bit possibly set in the length. */
4089 for (int i = max_bits; i >= sctz_len; i--)
4091 rtx_code_label *label = NULL;
4092 blksize = HOST_WIDE_INT_1U << i;
4094 /* If we're past the bits shared between min_ and max_len, expand
4095 a test on the dynamic length, comparing it with the
4096 BLKSIZE. */
4097 if (i <= tst_bits)
4099 label = gen_label_rtx ();
4100 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4101 ptr_mode, 1, label,
4102 profile_probability::even ());
4104 /* If we are at a bit that is in the prefix shared by min_ and
4105 max_len, skip this BLKSIZE if the bit is clear. */
4106 else if ((max_len & blksize) == 0)
4107 continue;
4109 /* Issue a store of BLKSIZE bytes. */
4110 to = store_by_pieces (to, blksize,
4111 constfun, constfundata,
4112 align, true,
4113 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4115 /* Adjust REM and PTR, unless this is the last iteration. */
4116 if (i != sctz_len)
4118 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4119 to = replace_equiv_address (to, ptr);
4120 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4121 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4124 if (label)
4126 emit_label (label);
4128 /* Given conditional stores, the offset can no longer be
4129 known, so clear it. */
4130 clear_mem_offset (to);
4134 return true;
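/* A worked example of the strategy above, with assumed bounds: for
   MIN_LEN == 9, MAX_LEN == 31, CTZ_LEN == 0 and byte alignment, the
   candidate leading block of 16 bytes is rejected because MIN_LEN is
   smaller, so the loop issues stores of 16, 8, 4, 2 and 1 bytes, each
   guarded by a test that the remaining length is at least that large,
   with PTR and REM advanced after each store but the last.  Had
   MIN_LEN been 17, bit 4 would be set in both bounds and the 16-byte
   store would have been emitted unconditionally.  */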
4137 /* Helper function to do the actual work for expand_builtin_memset. The
4138 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4139 so that this can also be called without constructing an actual CALL_EXPR.
4140 The other arguments and return value are the same as for
4141 expand_builtin_memset. */
4143 static rtx
4144 expand_builtin_memset_args (tree dest, tree val, tree len,
4145 rtx target, machine_mode mode, tree orig_exp)
4147 tree fndecl, fn;
4148 enum built_in_function fcode;
4149 machine_mode val_mode;
4150 char c;
4151 unsigned int dest_align;
4152 rtx dest_mem, dest_addr, len_rtx;
4153 HOST_WIDE_INT expected_size = -1;
4154 unsigned int expected_align = 0;
4155 unsigned HOST_WIDE_INT min_size;
4156 unsigned HOST_WIDE_INT max_size;
4157 unsigned HOST_WIDE_INT probable_max_size;
4159 dest_align = get_pointer_alignment (dest);
4161 /* If DEST is not a pointer type, don't do this operation in-line. */
4162 if (dest_align == 0)
4163 return NULL_RTX;
4165 if (currently_expanding_gimple_stmt)
4166 stringop_block_profile (currently_expanding_gimple_stmt,
4167 &expected_align, &expected_size);
4169 if (expected_align < dest_align)
4170 expected_align = dest_align;
4172 /* If the LEN parameter is zero, return DEST. */
4173 if (integer_zerop (len))
4175 /* Evaluate and ignore VAL in case it has side-effects. */
4176 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4177 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4180 /* Stabilize the arguments in case we fail. */
4181 dest = builtin_save_expr (dest);
4182 val = builtin_save_expr (val);
4183 len = builtin_save_expr (len);
4185 len_rtx = expand_normal (len);
4186 determine_block_size (len, len_rtx, &min_size, &max_size,
4187 &probable_max_size);
4188 dest_mem = get_memory_rtx (dest, len);
4189 val_mode = TYPE_MODE (unsigned_char_type_node);
4191 if (TREE_CODE (val) != INTEGER_CST
4192 || target_char_cast (val, &c))
4194 rtx val_rtx;
4196 val_rtx = expand_normal (val);
4197 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4199 /* Assume that we can memset by pieces if we can store
4200 the coefficients by pieces (in the required modes).
4201 We can't pass builtin_memset_gen_str as that emits RTL. */
4202 c = 1;
4203 if (tree_fits_uhwi_p (len)
4204 && can_store_by_pieces (tree_to_uhwi (len),
4205 builtin_memset_read_str, &c, dest_align,
4206 true))
4208 val_rtx = force_reg (val_mode, val_rtx);
4209 store_by_pieces (dest_mem, tree_to_uhwi (len),
4210 builtin_memset_gen_str, val_rtx, dest_align,
4211 true, RETURN_BEGIN);
4213 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4214 dest_align, expected_align,
4215 expected_size, min_size, max_size,
4216 probable_max_size)
4217 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4218 tree_ctz (len),
4219 min_size, max_size,
4220 val_rtx, 0,
4221 dest_align))
4222 goto do_libcall;
4224 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4225 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4226 return dest_mem;
4229 if (c)
4231 if (tree_fits_uhwi_p (len)
4232 && can_store_by_pieces (tree_to_uhwi (len),
4233 builtin_memset_read_str, &c, dest_align,
4234 true))
4235 store_by_pieces (dest_mem, tree_to_uhwi (len),
4236 builtin_memset_read_str, &c, dest_align, true,
4237 RETURN_BEGIN);
4238 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4239 gen_int_mode (c, val_mode),
4240 dest_align, expected_align,
4241 expected_size, min_size, max_size,
4242 probable_max_size)
4243 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4244 tree_ctz (len),
4245 min_size, max_size,
4246 NULL_RTX, c,
4247 dest_align))
4248 goto do_libcall;
4250 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4251 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4252 return dest_mem;
4255 set_mem_align (dest_mem, dest_align);
4256 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4257 CALL_EXPR_TAILCALL (orig_exp)
4258 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4259 expected_align, expected_size,
4260 min_size, max_size,
4261 probable_max_size, tree_ctz (len));
4263 if (dest_addr == 0)
4265 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4266 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4269 return dest_addr;
4271 do_libcall:
4272 fndecl = get_callee_fndecl (orig_exp);
4273 fcode = DECL_FUNCTION_CODE (fndecl);
4274 if (fcode == BUILT_IN_MEMSET)
4275 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4276 dest, val, len);
4277 else if (fcode == BUILT_IN_BZERO)
4278 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4279 dest, len);
4280 else
4281 gcc_unreachable ();
4282 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4283 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4284 return expand_call (fn, target, target == const0_rtx);
4287 /* Expand expression EXP, which is a call to the bzero builtin. Return
4288 NULL_RTX if we failed; the caller should emit a normal call. */
4290 static rtx
4291 expand_builtin_bzero (tree exp)
4293 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4294 return NULL_RTX;
4296 tree dest = CALL_EXPR_ARG (exp, 0);
4297 tree size = CALL_EXPR_ARG (exp, 1);
4299 /* New argument list transforming bzero(ptr x, int y) to
4300 memset(ptr x, int 0, size_t y). This is done this way
4301 so that if it isn't expanded inline, we fall back to
4302 calling bzero instead of memset. */
4304 location_t loc = EXPR_LOCATION (exp);
4306 return expand_builtin_memset_args (dest, integer_zero_node,
4307 fold_convert_loc (loc,
4308 size_type_node, size),
4309 const0_rtx, VOIDmode, exp);
4312 /* Try to expand cmpstr operation ICODE with the given operands.
4313 Return the result rtx on success, otherwise return null. */
4315 static rtx
4316 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4317 HOST_WIDE_INT align)
4319 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4321 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4322 target = NULL_RTX;
4324 class expand_operand ops[4];
4325 create_output_operand (&ops[0], target, insn_mode);
4326 create_fixed_operand (&ops[1], arg1_rtx);
4327 create_fixed_operand (&ops[2], arg2_rtx);
4328 create_integer_operand (&ops[3], align);
4329 if (maybe_expand_insn (icode, 4, ops))
4330 return ops[0].value;
4331 return NULL_RTX;
4334 /* Expand expression EXP, which is a call to the memcmp built-in function.
4335 Return NULL_RTX if we failed and the caller should emit a normal call,
4336 otherwise try to get the result in TARGET, if convenient.
4337 RESULT_EQ is true if we can relax the returned value to be either zero
4338 or nonzero, without caring about the sign. */
4340 static rtx
4341 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4343 if (!validate_arglist (exp,
4344 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4345 return NULL_RTX;
4347 tree arg1 = CALL_EXPR_ARG (exp, 0);
4348 tree arg2 = CALL_EXPR_ARG (exp, 1);
4349 tree len = CALL_EXPR_ARG (exp, 2);
4351 /* Due to the performance benefit, always inline the calls first
4352 when result_eq is false. */
4353 rtx result = NULL_RTX;
4354 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4355 if (!result_eq && fcode != BUILT_IN_BCMP)
4357 result = inline_expand_builtin_bytecmp (exp, target);
4358 if (result)
4359 return result;
4362 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4363 location_t loc = EXPR_LOCATION (exp);
4365 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4366 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4368 /* If we don't have POINTER_TYPE, call the function. */
4369 if (arg1_align == 0 || arg2_align == 0)
4370 return NULL_RTX;
4372 rtx arg1_rtx = get_memory_rtx (arg1, len);
4373 rtx arg2_rtx = get_memory_rtx (arg2, len);
4374 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4376 /* Set MEM_SIZE as appropriate. */
4377 if (CONST_INT_P (len_rtx))
4379 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4380 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4383 by_pieces_constfn constfn = NULL;
4385 /* Try to get the byte representation of the constant ARG2 (or, only
4386 when the function's result is used for equality to zero, ARG1)
4387 points to, with its byte size in NBYTES. */
4388 unsigned HOST_WIDE_INT nbytes;
4389 const char *rep = getbyterep (arg2, &nbytes);
4390 if (result_eq && rep == NULL)
4392 /* For equality to zero the arguments are interchangeable. */
4393 rep = getbyterep (arg1, &nbytes);
4394 if (rep != NULL)
4395 std::swap (arg1_rtx, arg2_rtx);
4398 /* If the function's constant bound LEN_RTX is less than or equal
4399 to the byte size of the representation of the constant argument,
4400 and if block move would be done by pieces, we can avoid loading
4401 the bytes from memory and only store the computed constant result. */
4402 if (rep
4403 && CONST_INT_P (len_rtx)
4404 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4405 constfn = builtin_memcpy_read_str;
4407 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4408 TREE_TYPE (len), target,
4409 result_eq, constfn,
4410 CONST_CAST (char *, rep));
4412 if (result)
4414 /* Return the value in the proper mode for this function. */
4415 if (GET_MODE (result) == mode)
4416 return result;
4418 if (target != 0)
4420 convert_move (target, result, 0);
4421 return target;
4424 return convert_to_mode (mode, result, 0);
4427 return NULL_RTX;
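/* An illustrative note on RESULT_EQ (assumed user-level uses, not code
   from this file): for

     if (__builtin_memcmp (a, b, n) == 0) ...

   only the zero/nonzero distinction matters, so the block comparison
   may return any nonzero value on a mismatch and the two arguments are
   interchangeable, which is why a constant representation of either
   argument can be used above.  A use of the sign, such as
   memcmp (a, b, n) < 0, keeps the full three-way semantics.  */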
4430 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4431 if we failed; the caller should emit a normal call, otherwise try to get
4432 the result in TARGET, if convenient. */
4434 static rtx
4435 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4437 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4438 return NULL_RTX;
4440 tree arg1 = CALL_EXPR_ARG (exp, 0);
4441 tree arg2 = CALL_EXPR_ARG (exp, 1);
4443 /* Due to the performance benefit, always inline the calls first. */
4444 rtx result = NULL_RTX;
4445 result = inline_expand_builtin_bytecmp (exp, target);
4446 if (result)
4447 return result;
4449 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4450 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4451 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4452 return NULL_RTX;
4454 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4455 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4457 /* If we don't have POINTER_TYPE, call the function. */
4458 if (arg1_align == 0 || arg2_align == 0)
4459 return NULL_RTX;
4461 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4462 arg1 = builtin_save_expr (arg1);
4463 arg2 = builtin_save_expr (arg2);
4465 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4466 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4468 /* Try to call cmpstrsi. */
4469 if (cmpstr_icode != CODE_FOR_nothing)
4470 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4471 MIN (arg1_align, arg2_align));
4473 /* Try to determine at least one length and call cmpstrnsi. */
4474 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4476 tree len;
4477 rtx arg3_rtx;
4479 tree len1 = c_strlen (arg1, 1);
4480 tree len2 = c_strlen (arg2, 1);
4482 if (len1)
4483 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4484 if (len2)
4485 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4487 /* If we don't have a constant length for the first, use the length
4488 of the second, if we know it. We don't require a constant for
4489 this case; some cost analysis could be done if both are available
4490 but neither is constant. For now, assume they're equally cheap,
4491 unless one has side effects. If both strings have constant lengths,
4492 use the smaller. */
4494 if (!len1)
4495 len = len2;
4496 else if (!len2)
4497 len = len1;
4498 else if (TREE_SIDE_EFFECTS (len1))
4499 len = len2;
4500 else if (TREE_SIDE_EFFECTS (len2))
4501 len = len1;
4502 else if (TREE_CODE (len1) != INTEGER_CST)
4503 len = len2;
4504 else if (TREE_CODE (len2) != INTEGER_CST)
4505 len = len1;
4506 else if (tree_int_cst_lt (len1, len2))
4507 len = len1;
4508 else
4509 len = len2;
4511 /* If both arguments have side effects, we cannot optimize. */
4512 if (len && !TREE_SIDE_EFFECTS (len))
4514 arg3_rtx = expand_normal (len);
4515 result = expand_cmpstrn_or_cmpmem
4516 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4517 arg3_rtx, MIN (arg1_align, arg2_align));
4521 tree fndecl = get_callee_fndecl (exp);
4522 if (result)
4524 /* Return the value in the proper mode for this function. */
4525 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4526 if (GET_MODE (result) == mode)
4527 return result;
4528 if (target == 0)
4529 return convert_to_mode (mode, result, 0);
4530 convert_move (target, result, 0);
4531 return target;
4534 /* Expand the library call ourselves using a stabilized argument
4535 list to avoid re-evaluating the function's arguments twice. */
4536 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4537 copy_warning (fn, exp);
4538 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4539 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4540 return expand_call (fn, target, target == const0_rtx);
4543 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4544 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4545 try to get the result in TARGET, if convenient. */
4547 static rtx
4548 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4549 ATTRIBUTE_UNUSED machine_mode mode)
4551 if (!validate_arglist (exp,
4552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4553 return NULL_RTX;
4555 tree arg1 = CALL_EXPR_ARG (exp, 0);
4556 tree arg2 = CALL_EXPR_ARG (exp, 1);
4557 tree arg3 = CALL_EXPR_ARG (exp, 2);
4559 location_t loc = EXPR_LOCATION (exp);
4560 tree len1 = c_strlen (arg1, 1);
4561 tree len2 = c_strlen (arg2, 1);
4563 /* Due to the performance benefit, always inline the calls first. */
4564 rtx result = NULL_RTX;
4565 result = inline_expand_builtin_bytecmp (exp, target);
4566 if (result)
4567 return result;
4569 /* If c_strlen can determine an expression for one of the string
4570 lengths, and it doesn't have side effects, then emit cmpstrnsi
4571 using length MIN(strlen(string)+1, arg3). */
4572 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4573 if (cmpstrn_icode == CODE_FOR_nothing)
4574 return NULL_RTX;
4576 tree len;
4578 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4579 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4581 if (len1)
4582 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4583 if (len2)
4584 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4586 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4588 /* If we don't have a constant length for the first, use the length
4589 of the second, if we know it. If neither string is constant length,
4590 use the given length argument. We don't require a constant for
4591 this case; some cost analysis could be done if both are available
4592 but neither is constant. For now, assume they're equally cheap,
4593 unless one has side effects. If both strings have constant lengths,
4594 use the smaller. */
4596 if (!len1 && !len2)
4597 len = len3;
4598 else if (!len1)
4599 len = len2;
4600 else if (!len2)
4601 len = len1;
4602 else if (TREE_SIDE_EFFECTS (len1))
4603 len = len2;
4604 else if (TREE_SIDE_EFFECTS (len2))
4605 len = len1;
4606 else if (TREE_CODE (len1) != INTEGER_CST)
4607 len = len2;
4608 else if (TREE_CODE (len2) != INTEGER_CST)
4609 len = len1;
4610 else if (tree_int_cst_lt (len1, len2))
4611 len = len1;
4612 else
4613 len = len2;
4615 /* If we are not using the given length, we must incorporate it here.
4616 The actual new length parameter will be MIN(len,arg3) in this case. */
4617 if (len != len3)
4619 len = fold_convert_loc (loc, sizetype, len);
4620 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4622 rtx arg1_rtx = get_memory_rtx (arg1, len);
4623 rtx arg2_rtx = get_memory_rtx (arg2, len);
4624 rtx arg3_rtx = expand_normal (len);
4625 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4626 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4627 MIN (arg1_align, arg2_align));
4629 tree fndecl = get_callee_fndecl (exp);
4630 if (result)
4632 /* Return the value in the proper mode for this function. */
4633 mode = TYPE_MODE (TREE_TYPE (exp));
4634 if (GET_MODE (result) == mode)
4635 return result;
4636 if (target == 0)
4637 return convert_to_mode (mode, result, 0);
4638 convert_move (target, result, 0);
4639 return target;
4642 /* Expand the library call ourselves using a stabilized argument
4643 list to avoid re-evaluating the function's arguments twice. */
4644 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4645 copy_warning (call, exp);
4646 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4647 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4648 return expand_call (call, target, target == const0_rtx);
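/* An assumed example of the length selection above (not code from this
   file): for

     __builtin_strncmp (s, "ab", 16)

   c_strlen gives 2 for the constant argument, so 2 + 1 == 3 is chosen
   over the bound 16 and the emitted cmpstrn/cmpmem comparison reads at
   most MIN (3, 16) == 3 bytes; the unknown-length argument S never
   needs to be scanned further than that.  */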
4651 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4652 if that's convenient. */
4654 static rtx
4655 expand_builtin_saveregs (void)
4657 rtx val;
4658 rtx_insn *seq;
4660 /* Don't do __builtin_saveregs more than once in a function.
4661 Save the result of the first call and reuse it. */
4662 if (saveregs_value != 0)
4663 return saveregs_value;
4665 /* When this function is called, it means that registers must be
4666 saved on entry to this function. So we migrate the call to the
4667 first insn of this function. */
4669 start_sequence ();
4671 /* Do whatever the machine needs done in this case. */
4672 val = targetm.calls.expand_builtin_saveregs ();
4674 seq = get_insns ();
4675 end_sequence ();
4677 saveregs_value = val;
4679 /* Put the insns after the NOTE that starts the function. If this
4680 is inside a start_sequence, make the outer-level insn chain current, so
4681 the code is placed at the start of the function. */
4682 push_topmost_sequence ();
4683 emit_insn_after (seq, entry_of_function ());
4684 pop_topmost_sequence ();
4686 return val;
4689 /* Expand a call to __builtin_next_arg. */
4691 static rtx
4692 expand_builtin_next_arg (void)
4694 /* Checking arguments is already done in fold_builtin_next_arg
4695 that must be called before this function. */
4696 return expand_binop (ptr_mode, add_optab,
4697 crtl->args.internal_arg_pointer,
4698 crtl->args.arg_offset_rtx,
4699 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4702 /* Make it easier for the backends by protecting the valist argument
4703 from multiple evaluations. */
4705 static tree
4706 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4708 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4710 /* The current way of determining the type of valist is completely
4711 bogus. We should have the information on the va builtin instead. */
4712 if (!vatype)
4713 vatype = targetm.fn_abi_va_list (cfun->decl);
4715 if (TREE_CODE (vatype) == ARRAY_TYPE)
4717 if (TREE_SIDE_EFFECTS (valist))
4718 valist = save_expr (valist);
4720 /* For this case, the backends will be expecting a pointer to
4721 vatype, but it's possible we've actually been given an array
4722 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4723 So fix it. */
4724 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4726 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4727 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4730 else
4732 tree pt = build_pointer_type (vatype);
4734 if (! needs_lvalue)
4736 if (! TREE_SIDE_EFFECTS (valist))
4737 return valist;
4739 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4740 TREE_SIDE_EFFECTS (valist) = 1;
4743 if (TREE_SIDE_EFFECTS (valist))
4744 valist = save_expr (valist);
4745 valist = fold_build2_loc (loc, MEM_REF,
4746 vatype, valist, build_int_cst (pt, 0));
4749 return valist;
4752 /* The "standard" definition of va_list is void*. */
4754 tree
4755 std_build_builtin_va_list (void)
4757 return ptr_type_node;
4760 /* The "standard" abi va_list is va_list_type_node. */
4762 tree
4763 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4765 return va_list_type_node;
4768 /* The "standard" type of va_list is va_list_type_node. */
4770 tree
4771 std_canonical_va_list_type (tree type)
4773 tree wtype, htype;
4775 wtype = va_list_type_node;
4776 htype = type;
4778 if (TREE_CODE (wtype) == ARRAY_TYPE)
4780 /* If va_list is an array type, the argument may have decayed
4781 to a pointer type, e.g. by being passed to another function.
4782 In that case, unwrap both types so that we can compare the
4783 underlying records. */
4784 if (TREE_CODE (htype) == ARRAY_TYPE
4785 || POINTER_TYPE_P (htype))
4787 wtype = TREE_TYPE (wtype);
4788 htype = TREE_TYPE (htype);
4791 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4792 return va_list_type_node;
4794 return NULL_TREE;
4797 /* The "standard" implementation of va_start: just assign `nextarg' to
4798 the variable. */
4800 void
4801 std_expand_builtin_va_start (tree valist, rtx nextarg)
4803 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4804 convert_move (va_r, nextarg, 0);
4807 /* Expand EXP, a call to __builtin_va_start. */
4809 static rtx
4810 expand_builtin_va_start (tree exp)
4812 rtx nextarg;
4813 tree valist;
4814 location_t loc = EXPR_LOCATION (exp);
4816 if (call_expr_nargs (exp) < 2)
4818 error_at (loc, "too few arguments to function %<va_start%>");
4819 return const0_rtx;
4822 if (fold_builtin_next_arg (exp, true))
4823 return const0_rtx;
4825 nextarg = expand_builtin_next_arg ();
4826 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4828 if (targetm.expand_builtin_va_start)
4829 targetm.expand_builtin_va_start (valist, nextarg);
4830 else
4831 std_expand_builtin_va_start (valist, nextarg);
4833 return const0_rtx;
4836 /* Expand EXP, a call to __builtin_va_end. */
4838 static rtx
4839 expand_builtin_va_end (tree exp)
4841 tree valist = CALL_EXPR_ARG (exp, 0);
4843 /* Evaluate for side effects, if needed. I hate macros that don't
4844 do that. */
4845 if (TREE_SIDE_EFFECTS (valist))
4846 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4848 return const0_rtx;
4851 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4852 builtin rather than just as an assignment in stdarg.h because of the
4853 nastiness of array-type va_list types. */
4855 static rtx
4856 expand_builtin_va_copy (tree exp)
4858 tree dst, src, t;
4859 location_t loc = EXPR_LOCATION (exp);
4861 dst = CALL_EXPR_ARG (exp, 0);
4862 src = CALL_EXPR_ARG (exp, 1);
4864 dst = stabilize_va_list_loc (loc, dst, 1);
4865 src = stabilize_va_list_loc (loc, src, 0);
4867 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4869 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4871 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4872 TREE_SIDE_EFFECTS (t) = 1;
4873 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4875 else
4877 rtx dstb, srcb, size;
4879 /* Evaluate to pointers. */
4880 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4881 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4882 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4883 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4885 dstb = convert_memory_address (Pmode, dstb);
4886 srcb = convert_memory_address (Pmode, srcb);
4888 /* "Dereference" to BLKmode memories. */
4889 dstb = gen_rtx_MEM (BLKmode, dstb);
4890 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4891 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4892 srcb = gen_rtx_MEM (BLKmode, srcb);
4893 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4894 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4896 /* Copy. */
4897 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4900 return const0_rtx;
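/* Illustrative background for the two branches above (ABI details are
   stated as assumptions, not taken from this file): where va_list is a
   plain pointer the copy degenerates to a scalar assignment, roughly

     dst = src;

   whereas where va_list is a one-element array of a bookkeeping struct,
   both operands decay to pointers and the whole record is copied with a
   block move, conceptually

     __builtin_memcpy (dst, src, sizeof (*dst));  */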
4903 /* Expand a call to one of the builtin functions __builtin_frame_address or
4904 __builtin_return_address. */
4906 static rtx
4907 expand_builtin_frame_address (tree fndecl, tree exp)
4909 /* The argument must be a nonnegative integer constant.
4910 It counts the number of frames to scan up the stack.
4911 The value is either the frame pointer value or the return
4912 address saved in that frame. */
4913 if (call_expr_nargs (exp) == 0)
4914 /* Warning about missing arg was already issued. */
4915 return const0_rtx;
4916 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4918 error ("invalid argument to %qD", fndecl);
4919 return const0_rtx;
4921 else
4923 /* Number of frames to scan up the stack. */
4924 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4926 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4928 /* Some ports cannot access arbitrary stack frames. */
4929 if (tem == NULL)
4931 warning (0, "unsupported argument to %qD", fndecl);
4932 return const0_rtx;
4935 if (count)
4937 /* Warn since no effort is made to ensure that any frame
4938 beyond the current one exists or can be safely reached. */
4939 warning (OPT_Wframe_address, "calling %qD with "
4940 "a nonzero argument is unsafe", fndecl);
4943 /* For __builtin_frame_address, return what we've got. */
4944 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4945 return tem;
4947 if (!REG_P (tem)
4948 && ! CONSTANT_P (tem))
4949 tem = copy_addr_to_reg (tem);
4950 return tem;
4954 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4955 failed and the caller should emit a normal call. */
4957 static rtx
4958 expand_builtin_alloca (tree exp)
4960 rtx op0;
4961 rtx result;
4962 unsigned int align;
4963 tree fndecl = get_callee_fndecl (exp);
4964 HOST_WIDE_INT max_size;
4965 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4966 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4967 bool valid_arglist
4968 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4969 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4970 VOID_TYPE)
4971 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4972 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4973 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4975 if (!valid_arglist)
4976 return NULL_RTX;
4978 /* Compute the argument. */
4979 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4981 /* Compute the alignment. */
4982 align = (fcode == BUILT_IN_ALLOCA
4983 ? BIGGEST_ALIGNMENT
4984 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4986 /* Compute the maximum size. */
4987 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4988 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4989 : -1);
4991 /* Allocate the desired space. If the allocation stems from the declaration
4992 of a variable-sized object, it cannot accumulate. */
4993 result
4994 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4995 result = convert_memory_address (ptr_mode, result);
4997 /* Dynamic allocations for variables are recorded during gimplification. */
4998 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4999 record_dynamic_alloc (exp);
5001 return result;
5004 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5005 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5006 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5007 handle_builtin_stack_restore function. */
5009 static rtx
5010 expand_asan_emit_allocas_unpoison (tree exp)
5012 tree arg0 = CALL_EXPR_ARG (exp, 0);
5013 tree arg1 = CALL_EXPR_ARG (exp, 1);
5014 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5015 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5016 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5017 stack_pointer_rtx, NULL_RTX, 0,
5018 OPTAB_LIB_WIDEN);
5019 off = convert_modes (ptr_mode, Pmode, off, 0);
5020 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5021 OPTAB_LIB_WIDEN);
5022 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5023 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5024 top, ptr_mode, bot, ptr_mode);
5025 return ret;
5028 /* Expand a call to bswap builtin in EXP.
5029 Return NULL_RTX if a normal call should be emitted rather than expanding the
5030 function in-line. If convenient, the result should be placed in TARGET.
5031 SUBTARGET may be used as the target for computing one of EXP's operands. */
5033 static rtx
5034 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5035 rtx subtarget)
5037 tree arg;
5038 rtx op0;
5040 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5041 return NULL_RTX;
5043 arg = CALL_EXPR_ARG (exp, 0);
5044 op0 = expand_expr (arg,
5045 subtarget && GET_MODE (subtarget) == target_mode
5046 ? subtarget : NULL_RTX,
5047 target_mode, EXPAND_NORMAL);
5048 if (GET_MODE (op0) != target_mode)
5049 op0 = convert_to_mode (target_mode, op0, 1);
5051 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5053 gcc_assert (target);
5055 return convert_to_mode (target_mode, target, 1);
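/* An assumed example of the byte-swap semantics expanded above:

     __builtin_bswap32 (0x11223344) == 0x44332211
     __builtin_bswap16 (0xaabb)     == 0xbbaa

   The argument is first converted to TARGET_MODE if needed, the
   bswap_optab expansion is applied, and the result is converted back
   for the caller.  */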
5058 /* Expand a call to a unary builtin in EXP.
5059 Return NULL_RTX if a normal call should be emitted rather than expanding the
5060 function in-line. If convenient, the result should be placed in TARGET.
5061 SUBTARGET may be used as the target for computing one of EXP's operands. */
5063 static rtx
5064 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5065 rtx subtarget, optab op_optab)
5067 rtx op0;
5069 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5070 return NULL_RTX;
5072 /* Compute the argument. */
5073 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5074 (subtarget
5075 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5076 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5077 VOIDmode, EXPAND_NORMAL);
5078 /* Compute op, into TARGET if possible.
5079 Set TARGET to wherever the result comes back. */
5080 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5081 op_optab, op0, target, op_optab != clrsb_optab);
5082 gcc_assert (target);
5084 return convert_to_mode (target_mode, target, 0);
5087 /* Expand a call to __builtin_expect. We just return our argument
5088 as the builtin_expect semantics should've already been executed by
5089 the tree branch prediction pass. */
5091 static rtx
5092 expand_builtin_expect (tree exp, rtx target)
5094 tree arg;
5096 if (call_expr_nargs (exp) < 2)
5097 return const0_rtx;
5098 arg = CALL_EXPR_ARG (exp, 0);
5100 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5101 /* When guessing was done, the hints should be already stripped away. */
5102 gcc_assert (!flag_guess_branch_prob
5103 || optimize == 0 || seen_error ());
5104 return target;
5107 /* Expand a call to __builtin_expect_with_probability. We just return our
5108 argument as the builtin_expect semantics should've already been executed by
5109 the tree branch prediction pass. */
5111 static rtx
5112 expand_builtin_expect_with_probability (tree exp, rtx target)
5114 tree arg;
5116 if (call_expr_nargs (exp) < 3)
5117 return const0_rtx;
5118 arg = CALL_EXPR_ARG (exp, 0);
5120 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5121 /* When guessing was done, the hints should be already stripped away. */
5122 gcc_assert (!flag_guess_branch_prob
5123 || optimize == 0 || seen_error ());
5124 return target;
5128 /* Expand a call to __builtin_assume_aligned. We just return our first
5129 argument as the builtin_assume_aligned semantic should've been already
5130 executed by CCP. */
5132 static rtx
5133 expand_builtin_assume_aligned (tree exp, rtx target)
5135 if (call_expr_nargs (exp) < 2)
5136 return const0_rtx;
5137 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5138 EXPAND_NORMAL);
5139 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5140 && (call_expr_nargs (exp) < 3
5141 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5142 return target;
5145 void
5146 expand_builtin_trap (void)
5148 if (targetm.have_trap ())
5150 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5151 /* For trap insns when not accumulating outgoing args force
5152 REG_ARGS_SIZE note to prevent crossjumping of calls with
5153 different args sizes. */
5154 if (!ACCUMULATE_OUTGOING_ARGS)
5155 add_args_size_note (insn, stack_pointer_delta);
5157 else
5159 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5160 tree call_expr = build_call_expr (fn, 0);
5161 expand_call (call_expr, NULL_RTX, false);
5164 emit_barrier ();
5167 /* Expand a call to __builtin_unreachable. We do nothing except emit
5168 a barrier saying that control flow will not pass here.
5170 It is the responsibility of the program being compiled to ensure
5171 that control flow never reaches __builtin_unreachable. */
5172 static void
5173 expand_builtin_unreachable (void)
5175 emit_barrier ();
5178 /* Expand EXP, a call to fabs, fabsf or fabsl.
5179 Return NULL_RTX if a normal call should be emitted rather than expanding
5180 the function inline. If convenient, the result should be placed
5181 in TARGET. SUBTARGET may be used as the target for computing
5182 the operand. */
5184 static rtx
5185 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5187 machine_mode mode;
5188 tree arg;
5189 rtx op0;
5191 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5192 return NULL_RTX;
5194 arg = CALL_EXPR_ARG (exp, 0);
5195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5196 mode = TYPE_MODE (TREE_TYPE (arg));
5197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5198 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5201 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5202 Return NULL if a normal call should be emitted rather than expanding the
5203 function inline. If convenient, the result should be placed in TARGET.
5204 SUBTARGET may be used as the target for computing the operand. */
5206 static rtx
5207 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5209 rtx op0, op1;
5210 tree arg;
5212 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5213 return NULL_RTX;
5215 arg = CALL_EXPR_ARG (exp, 0);
5216 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5218 arg = CALL_EXPR_ARG (exp, 1);
5219 op1 = expand_normal (arg);
5221 return expand_copysign (op0, op1, target);
5224 /* Emit a call to __builtin___clear_cache. */
5226 void
5227 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5229 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5230 BUILTIN_ASM_NAME_PTR
5231 (BUILT_IN_CLEAR_CACHE));
5233 emit_library_call (callee,
5234 LCT_NORMAL, VOIDmode,
5235 convert_memory_address (ptr_mode, begin), ptr_mode,
5236 convert_memory_address (ptr_mode, end), ptr_mode);
5239 /* Emit a call to __builtin___clear_cache, unless the target specifies
5240 it as do-nothing. This function can be used by trampoline
5241 finalizers to duplicate the effects of expanding a call to the
5242 clear_cache builtin. */
5244 void
5245 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5247 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5248 || CONST_INT_P (begin))
5249 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5250 || CONST_INT_P (end)));
5252 if (targetm.have_clear_cache ())
5254 /* We have a "clear_cache" insn, and it will handle everything. */
5255 class expand_operand ops[2];
5257 create_address_operand (&ops[0], begin);
5258 create_address_operand (&ops[1], end);
5260 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5261 return;
5263 else
5265 #ifndef CLEAR_INSN_CACHE
5266 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5267 does nothing. There is no need to call it. Do nothing. */
5268 return;
5269 #endif /* CLEAR_INSN_CACHE */
5272 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5275 /* Expand a call to __builtin___clear_cache. */
5277 static void
5278 expand_builtin___clear_cache (tree exp)
5280 tree begin, end;
5281 rtx begin_rtx, end_rtx;
5283 /* We must not expand to a library call. If we did, any
5284 fallback library function in libgcc that might contain a call to
5285 __builtin___clear_cache() would recurse infinitely. */
5286 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5288 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5289 return;
5292 begin = CALL_EXPR_ARG (exp, 0);
5293 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5295 end = CALL_EXPR_ARG (exp, 1);
5296 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5298 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5301 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5303 static rtx
5304 round_trampoline_addr (rtx tramp)
5306 rtx temp, addend, mask;
5308 /* If we don't need too much alignment, we'll have been guaranteed
5309 proper alignment by get_trampoline_type. */
5310 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5311 return tramp;
5313 /* Round address up to desired boundary. */
5314 temp = gen_reg_rtx (Pmode);
5315 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5316 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5318 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5319 temp, 0, OPTAB_LIB_WIDEN);
5320 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5321 temp, 0, OPTAB_LIB_WIDEN);
5323 return tramp;
5326 static rtx
5327 expand_builtin_init_trampoline (tree exp, bool onstack)
5329 tree t_tramp, t_func, t_chain;
5330 rtx m_tramp, r_tramp, r_chain, tmp;
5332 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5333 POINTER_TYPE, VOID_TYPE))
5334 return NULL_RTX;
5336 t_tramp = CALL_EXPR_ARG (exp, 0);
5337 t_func = CALL_EXPR_ARG (exp, 1);
5338 t_chain = CALL_EXPR_ARG (exp, 2);
5340 r_tramp = expand_normal (t_tramp);
5341 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5342 MEM_NOTRAP_P (m_tramp) = 1;
5344 /* If ONSTACK, the TRAMP argument should be the address of a field
5345 within the local function's FRAME decl. Either way, let's see if
5346 we can fill in the MEM_ATTRs for this memory. */
5347 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5348 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5350 /* Creator of a heap trampoline is responsible for making sure the
5351 address is aligned to at least STACK_BOUNDARY. Normally malloc
5352 will ensure this anyhow. */
5353 tmp = round_trampoline_addr (r_tramp);
5354 if (tmp != r_tramp)
5356 m_tramp = change_address (m_tramp, BLKmode, tmp);
5357 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5358 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5361 /* The FUNC argument should be the address of the nested function.
5362 Extract the actual function decl to pass to the hook. */
5363 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5364 t_func = TREE_OPERAND (t_func, 0);
5365 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5367 r_chain = expand_normal (t_chain);
5369 /* Generate insns to initialize the trampoline. */
5370 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5372 if (onstack)
5374 trampolines_created = 1;
5376 if (targetm.calls.custom_function_descriptors != 0)
5377 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5378 "trampoline generated for nested function %qD", t_func);
5381 return const0_rtx;
5384 static rtx
5385 expand_builtin_adjust_trampoline (tree exp)
5387 rtx tramp;
5389 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5390 return NULL_RTX;
5392 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5393 tramp = round_trampoline_addr (tramp);
5394 if (targetm.calls.trampoline_adjust_address)
5395 tramp = targetm.calls.trampoline_adjust_address (tramp);
5397 return tramp;
5400 /* Expand a call to the builtin descriptor initialization routine.
5401    A descriptor is made up of a pair of pointers, to the static
5402    chain and to the code entry, in that order.  */
5404 static rtx
5405 expand_builtin_init_descriptor (tree exp)
5407 tree t_descr, t_func, t_chain;
5408 rtx m_descr, r_descr, r_func, r_chain;
5410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5411 VOID_TYPE))
5412 return NULL_RTX;
5414 t_descr = CALL_EXPR_ARG (exp, 0);
5415 t_func = CALL_EXPR_ARG (exp, 1);
5416 t_chain = CALL_EXPR_ARG (exp, 2);
5418 r_descr = expand_normal (t_descr);
5419 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5420 MEM_NOTRAP_P (m_descr) = 1;
5421 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5423 r_func = expand_normal (t_func);
5424 r_chain = expand_normal (t_chain);
5426 /* Generate insns to initialize the descriptor. */
5427 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5428 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5429 POINTER_SIZE / BITS_PER_UNIT), r_func);
5431 return const0_rtx;
5434 /* Expand a call to the builtin descriptor adjustment routine. */
5436 static rtx
5437 expand_builtin_adjust_descriptor (tree exp)
5439 rtx tramp;
5441 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5442 return NULL_RTX;
5444 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5446 /* Unalign the descriptor to allow runtime identification. */
5447 tramp = plus_constant (ptr_mode, tramp,
5448 targetm.calls.custom_function_descriptors);
5450 return force_operand (tramp, NULL_RTX);
5453 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5454 function. The function first checks whether the back end provides
5455 an insn to implement signbit for the respective mode. If not, it
5456 checks whether the floating point format of the value is such that
5457    the sign bit can be extracted.  If that is not the case, fall back to ARG < 0.0.
5458 EXP is the expression that is a call to the builtin function; if
5459 convenient, the result should be placed in TARGET. */
5460 static rtx
5461 expand_builtin_signbit (tree exp, rtx target)
5463 const struct real_format *fmt;
5464 scalar_float_mode fmode;
5465 scalar_int_mode rmode, imode;
5466 tree arg;
5467 int word, bitpos;
5468 enum insn_code icode;
5469 rtx temp;
5470 location_t loc = EXPR_LOCATION (exp);
5472 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5473 return NULL_RTX;
5475 arg = CALL_EXPR_ARG (exp, 0);
5476 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5477 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5478 fmt = REAL_MODE_FORMAT (fmode);
5480 arg = builtin_save_expr (arg);
5482   /* Expand the argument yielding an RTX expression.  */
5483 temp = expand_normal (arg);
5485 /* Check if the back end provides an insn that handles signbit for the
5486 argument's mode. */
5487 icode = optab_handler (signbit_optab, fmode);
5488 if (icode != CODE_FOR_nothing)
5490 rtx_insn *last = get_last_insn ();
5491 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5492 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5493 return target;
5494 delete_insns_since (last);
5497 /* For floating point formats without a sign bit, implement signbit
5498 as "ARG < 0.0". */
5499 bitpos = fmt->signbit_ro;
5500 if (bitpos < 0)
5502 /* But we can't do this if the format supports signed zero. */
5503 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5505 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5506 build_real (TREE_TYPE (arg), dconst0));
5507 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5510 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5512 imode = int_mode_for_mode (fmode).require ();
5513 temp = gen_lowpart (imode, temp);
5515 else
5517 imode = word_mode;
5518 /* Handle targets with different FP word orders. */
5519 if (FLOAT_WORDS_BIG_ENDIAN)
5520 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5521 else
5522 word = bitpos / BITS_PER_WORD;
5523 temp = operand_subword_force (temp, word, fmode);
5524 bitpos = bitpos % BITS_PER_WORD;
5527 /* Force the intermediate word_mode (or narrower) result into a
5528 register. This avoids attempting to create paradoxical SUBREGs
5529 of floating point modes below. */
5530 temp = force_reg (imode, temp);
5532 /* If the bitpos is within the "result mode" lowpart, the operation
5533      can be implemented with a single bitwise AND.  Otherwise, we need
5534 a right shift and an AND. */
5536 if (bitpos < GET_MODE_BITSIZE (rmode))
5538 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5540 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5541 temp = gen_lowpart (rmode, temp);
5542 temp = expand_binop (rmode, and_optab, temp,
5543 immed_wide_int_const (mask, rmode),
5544 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5546 else
5548 /* Perform a logical right shift to place the signbit in the least
5549 significant bit, then truncate the result to the desired mode
5550 and mask just this bit. */
5551 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5552 temp = gen_lowpart (rmode, temp);
5553 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5554 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5557 return temp;
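/* Worked example (illustrative; assumes IEEE formats and a 32-bit int
   result mode): for "signbit (float)" the sign bit is bit 31, which lies
   within the result mode, so a single AND with 0x80000000 suffices; for
   "signbit (double)" on a 64-bit word target the bit position is 63, which
   takes the shift path above, roughly "(x >> 63) & 1".  */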
5560 /* Expand fork or exec calls. TARGET is the desired target of the
5561 call. EXP is the call. FN is the
5562    identifier of the actual function.  IGNORE is nonzero if the
5563 value is to be ignored. */
5565 static rtx
5566 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5568 tree id, decl;
5569 tree call;
5571 /* If we are not profiling, just call the function. */
5572 if (!profile_arc_flag)
5573 return NULL_RTX;
5575   /* Otherwise call the wrapper.  This should be equivalent for the rest of the
5576      compiler, so the code does not diverge, and the wrapper may run the
5577      code necessary to keep the profiling sane.  */
5579 switch (DECL_FUNCTION_CODE (fn))
5581 case BUILT_IN_FORK:
5582 id = get_identifier ("__gcov_fork");
5583 break;
5585 case BUILT_IN_EXECL:
5586 id = get_identifier ("__gcov_execl");
5587 break;
5589 case BUILT_IN_EXECV:
5590 id = get_identifier ("__gcov_execv");
5591 break;
5593 case BUILT_IN_EXECLP:
5594 id = get_identifier ("__gcov_execlp");
5595 break;
5597 case BUILT_IN_EXECLE:
5598 id = get_identifier ("__gcov_execle");
5599 break;
5601 case BUILT_IN_EXECVP:
5602 id = get_identifier ("__gcov_execvp");
5603 break;
5605 case BUILT_IN_EXECVE:
5606 id = get_identifier ("__gcov_execve");
5607 break;
5609 default:
5610 gcc_unreachable ();
5613 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5614 FUNCTION_DECL, id, TREE_TYPE (fn));
5615 DECL_EXTERNAL (decl) = 1;
5616 TREE_PUBLIC (decl) = 1;
5617 DECL_ARTIFICIAL (decl) = 1;
5618 TREE_NOTHROW (decl) = 1;
5619 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5620 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5621 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5622 return expand_call (call, target, ignore);
5627 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5628 the pointer in these functions is void*, the tree optimizers may remove
5629 casts. The mode computed in expand_builtin isn't reliable either, due
5630 to __sync_bool_compare_and_swap.
5632 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5633 group of builtins. This gives us log2 of the mode size. */
5635 static inline machine_mode
5636 get_builtin_sync_mode (int fcode_diff)
5638 /* The size is not negotiable, so ask not to get BLKmode in return
5639 if the target indicates that a smaller size would be better. */
5640 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
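/* Example (illustrative): BUILT_IN_SYNC_FETCH_AND_ADD_4 is two entries past
   BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and we request an
   integer mode of 8 << 2 == 32 bits, i.e. SImode on typical targets.  */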
5643 /* Expand the memory expression LOC and return the appropriate memory operand
5644 for the builtin_sync operations. */
5646 static rtx
5647 get_builtin_sync_mem (tree loc, machine_mode mode)
5649 rtx addr, mem;
5650 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5651 ? TREE_TYPE (TREE_TYPE (loc))
5652 : TREE_TYPE (loc));
5653 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5655 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5656 addr = convert_memory_address (addr_mode, addr);
5658 /* Note that we explicitly do not want any alias information for this
5659 memory, so that we kill all other live memories. Otherwise we don't
5660 satisfy the full barrier semantics of the intrinsic. */
5661 mem = gen_rtx_MEM (mode, addr);
5663 set_mem_addr_space (mem, addr_space);
5665 mem = validize_mem (mem);
5667   /* The alignment needs to be at least that of the mode.  */
5668 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5669 get_pointer_alignment (loc)));
5670 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5671 MEM_VOLATILE_P (mem) = 1;
5673 return mem;
5676 /* Make sure an argument is in the right mode.
5677 EXP is the tree argument.
5678 MODE is the mode it should be in. */
5680 static rtx
5681 expand_expr_force_mode (tree exp, machine_mode mode)
5683 rtx val;
5684 machine_mode old_mode;
5686 if (TREE_CODE (exp) == SSA_NAME
5687 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5689 /* Undo argument promotion if possible, as combine might not
5690 be able to do it later due to MEM_VOLATILE_P uses in the
5691 patterns. */
5692 gimple *g = get_gimple_for_ssa_name (exp);
5693 if (g && gimple_assign_cast_p (g))
5695 tree rhs = gimple_assign_rhs1 (g);
5696 tree_code code = gimple_assign_rhs_code (g);
5697 if (CONVERT_EXPR_CODE_P (code)
5698 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5699 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5700 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5701 && (TYPE_PRECISION (TREE_TYPE (exp))
5702 > TYPE_PRECISION (TREE_TYPE (rhs))))
5703 exp = rhs;
5707 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5708 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5709 of CONST_INTs, where we know the old_mode only from the call argument. */
5711 old_mode = GET_MODE (val);
5712 if (old_mode == VOIDmode)
5713 old_mode = TYPE_MODE (TREE_TYPE (exp));
5714 val = convert_modes (mode, old_mode, val, 1);
5715 return val;
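/* Illustration of the cast stripping above (hypothetical gimple):

     _2 = (int) c_1;                      // c_1 has a QImode type
     __atomic_fetch_add_1 (p_3, _2, 5);

   Expanding the value operand in QImode looks through the widening
   conversion and expands c_1 directly, avoiding a needlessly promoted
   operand that combine might not be able to narrow later.  */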
5719 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5720 EXP is the CALL_EXPR. CODE is the rtx code
5721 that corresponds to the arithmetic or logical operation from the name;
5722 an exception here is that NOT actually means NAND. TARGET is an optional
5723    place for us to store the results; AFTER is true if this is the
5724    xxx_and_fetch form, i.e. the value after the operation is returned.  */
5726 static rtx
5727 expand_builtin_sync_operation (machine_mode mode, tree exp,
5728 enum rtx_code code, bool after,
5729 rtx target)
5731 rtx val, mem;
5732 location_t loc = EXPR_LOCATION (exp);
5734 if (code == NOT && warn_sync_nand)
5736 tree fndecl = get_callee_fndecl (exp);
5737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5739 static bool warned_f_a_n, warned_n_a_f;
5741 switch (fcode)
5743 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5744 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5745 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5746 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5747 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5748 if (warned_f_a_n)
5749 break;
5751 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5752 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5753 warned_f_a_n = true;
5754 break;
5756 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5757 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5758 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5759 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5760 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5761 if (warned_n_a_f)
5762 break;
5764 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5765 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5766 warned_n_a_f = true;
5767 break;
5769 default:
5770 gcc_unreachable ();
5774 /* Expand the operands. */
5775 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5776 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5778 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5779 after);
5782 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5783 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5784 true if this is the boolean form. TARGET is a place for us to store the
5785 results; this is NOT optional if IS_BOOL is true. */
5787 static rtx
5788 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5789 bool is_bool, rtx target)
5791 rtx old_val, new_val, mem;
5792 rtx *pbool, *poval;
5794 /* Expand the operands. */
5795 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5796 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5797 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5799 pbool = poval = NULL;
5800 if (target != const0_rtx)
5802 if (is_bool)
5803 pbool = &target;
5804 else
5805 poval = &target;
5807 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5808 false, MEMMODEL_SYNC_SEQ_CST,
5809 MEMMODEL_SYNC_SEQ_CST))
5810 return NULL_RTX;
5812 return target;
5815 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5816 general form is actually an atomic exchange, and some targets only
5817 support a reduced form with the second argument being a constant 1.
5818 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5819 the results. */
5821 static rtx
5822 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5823 rtx target)
5825 rtx val, mem;
5827 /* Expand the operands. */
5828 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5829 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5831 return expand_sync_lock_test_and_set (target, mem, val);
5834 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5836 static void
5837 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5839 rtx mem;
5841 /* Expand the operands. */
5842 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5844 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
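/* Illustration: at the source level, __sync_lock_release (&lock) is simply
   an atomic store of zero with (SYNC) release semantics, which is exactly
   the expand_atomic_store call above.  */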
5847 /* Given an integer representing an ``enum memmodel'', verify its
5848 correctness and return the memory model enum. */
5850 static enum memmodel
5851 get_memmodel (tree exp)
5853 /* If the parameter is not a constant, it's a run time value so we'll just
5854 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5855 if (TREE_CODE (exp) != INTEGER_CST)
5856 return MEMMODEL_SEQ_CST;
5858 rtx op = expand_normal (exp);
5860 unsigned HOST_WIDE_INT val = INTVAL (op);
5861 if (targetm.memmodel_check)
5862 val = targetm.memmodel_check (val);
5863 else if (val & ~MEMMODEL_MASK)
5864 return MEMMODEL_SEQ_CST;
5866   /* Should never see a user-explicit SYNC memory model, so >= LAST works.  */
5867 if (memmodel_base (val) >= MEMMODEL_LAST)
5868 return MEMMODEL_SEQ_CST;
5870 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5871 be conservative and promote consume to acquire. */
5872 if (val == MEMMODEL_CONSUME)
5873 val = MEMMODEL_ACQUIRE;
5875 return (enum memmodel) val;
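/* Illustration (values follow the __ATOMIC_* macros): a call such as
   __atomic_load_n (p, __ATOMIC_CONSUME) arrives here with the constant 1
   (MEMMODEL_CONSUME) and is promoted to MEMMODEL_ACQUIRE per the Bugzilla
   59448 workaround above, while a non-constant model argument is simply
   treated as MEMMODEL_SEQ_CST.  */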
5878 /* Expand the __atomic_exchange intrinsic:
5879 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5880 EXP is the CALL_EXPR.
5881 TARGET is an optional place for us to store the results. */
5883 static rtx
5884 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5886 rtx val, mem;
5887 enum memmodel model;
5889 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5891 if (!flag_inline_atomics)
5892 return NULL_RTX;
5894 /* Expand the operands. */
5895 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5896 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5898 return expand_atomic_exchange (target, mem, val, model);
5901 /* Expand the __atomic_compare_exchange intrinsic:
5902 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5903 TYPE desired, BOOL weak,
5904 enum memmodel success,
5905 enum memmodel failure)
5906 EXP is the CALL_EXPR.
5907 TARGET is an optional place for us to store the results. */
5909 static rtx
5910 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5911 rtx target)
5913 rtx expect, desired, mem, oldval;
5914 rtx_code_label *label;
5915 tree weak;
5916 bool is_weak;
5918 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5919 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5921 if (failure > success)
5922 success = MEMMODEL_SEQ_CST;
5924 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5926 failure = MEMMODEL_SEQ_CST;
5927 success = MEMMODEL_SEQ_CST;
5931 if (!flag_inline_atomics)
5932 return NULL_RTX;
5934 /* Expand the operands. */
5935 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5937 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5938 expect = convert_memory_address (Pmode, expect);
5939 expect = gen_rtx_MEM (mode, expect);
5940 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5942 weak = CALL_EXPR_ARG (exp, 3);
5943 is_weak = false;
5944 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5945 is_weak = true;
5947 if (target == const0_rtx)
5948 target = NULL;
5950   /* Lest the rtl backend create a race condition with an improper store
5951 to memory, always create a new pseudo for OLDVAL. */
5952 oldval = NULL;
5954 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5955 is_weak, success, failure))
5956 return NULL_RTX;
5958 /* Conditionally store back to EXPECT, lest we create a race condition
5959 with an improper store to memory. */
5960 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5961 the normal case where EXPECT is totally private, i.e. a register. At
5962 which point the store can be unconditional. */
5963 label = gen_label_rtx ();
5964 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5965 GET_MODE (target), 1, label);
5966 emit_move_insn (expect, oldval);
5967 emit_label (label);
5969 return target;
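/* User-level illustration (assumed snippet): for

     bool ok = __atomic_compare_exchange_n (&v, &expected, desired, false,
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   the sequence emitted above writes the old value of V back into EXPECTED
   only when the exchange failed, which is what the conditional jump around
   the emit_move_insn (expect, oldval) implements.  */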
5972 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5973 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5974 call. The weak parameter must be dropped to match the expected parameter
5975    list, and the expected argument changed from a value to a pointer to a
5976    memory slot.  */
5978 static void
5979 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5981 unsigned int z;
5982 vec<tree, va_gc> *vec;
5984 vec_alloc (vec, 5);
5985 vec->quick_push (gimple_call_arg (call, 0));
5986 tree expected = gimple_call_arg (call, 1);
5987 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5988 TREE_TYPE (expected));
5989 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5990 if (expd != x)
5991 emit_move_insn (x, expd);
5992 tree v = make_tree (TREE_TYPE (expected), x);
5993 vec->quick_push (build1 (ADDR_EXPR,
5994 build_pointer_type (TREE_TYPE (expected)), v));
5995 vec->quick_push (gimple_call_arg (call, 2));
5996 /* Skip the boolean weak parameter. */
5997 for (z = 4; z < 6; z++)
5998 vec->quick_push (gimple_call_arg (call, z));
5999 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6000 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6001 gcc_assert (bytes_log2 < 5);
6002 built_in_function fncode
6003 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6004 + bytes_log2);
6005 tree fndecl = builtin_decl_explicit (fncode);
6006 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6007 fndecl);
6008 tree exp = build_call_vec (boolean_type_node, fn, vec);
6009 tree lhs = gimple_call_lhs (call);
6010 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6011 if (lhs)
6013 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6014 if (GET_MODE (boolret) != mode)
6015 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6016 x = force_reg (mode, x);
6017 write_complex_part (target, boolret, true);
6018 write_complex_part (target, x, false);
6022 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6024 void
6025 expand_ifn_atomic_compare_exchange (gcall *call)
6027 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6028 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6029 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6031 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6032 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6034 if (failure > success)
6035 success = MEMMODEL_SEQ_CST;
6037 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6039 failure = MEMMODEL_SEQ_CST;
6040 success = MEMMODEL_SEQ_CST;
6043 if (!flag_inline_atomics)
6045 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6046 return;
6049 /* Expand the operands. */
6050 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6052 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6053 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6055 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6057 rtx boolret = NULL;
6058 rtx oldval = NULL;
6060 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6061 is_weak, success, failure))
6063 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6064 return;
6067 tree lhs = gimple_call_lhs (call);
6068 if (lhs)
6070 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6071 if (GET_MODE (boolret) != mode)
6072 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6073 write_complex_part (target, boolret, true);
6074 write_complex_part (target, oldval, false);
6078 /* Expand the __atomic_load intrinsic:
6079 TYPE __atomic_load (TYPE *object, enum memmodel)
6080 EXP is the CALL_EXPR.
6081 TARGET is an optional place for us to store the results. */
6083 static rtx
6084 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6086 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6087 if (is_mm_release (model) || is_mm_acq_rel (model))
6088 model = MEMMODEL_SEQ_CST;
6090 if (!flag_inline_atomics)
6091 return NULL_RTX;
6093 /* Expand the operand. */
6094 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6096 return expand_atomic_load (target, mem, model);
6100 /* Expand the __atomic_store intrinsic:
6101 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6102 EXP is the CALL_EXPR.
6103 TARGET is an optional place for us to store the results. */
6105 static rtx
6106 expand_builtin_atomic_store (machine_mode mode, tree exp)
6108 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6109 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6110 || is_mm_release (model)))
6111 model = MEMMODEL_SEQ_CST;
6113 if (!flag_inline_atomics)
6114 return NULL_RTX;
6116 /* Expand the operands. */
6117 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6118 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6120 return expand_atomic_store (mem, val, model, false);
6123 /* Expand the __atomic_fetch_XXX intrinsic:
6124 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6125 EXP is the CALL_EXPR.
6126 TARGET is an optional place for us to store the results.
6127    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6128 FETCH_AFTER is true if returning the result of the operation.
6129 FETCH_AFTER is false if returning the value before the operation.
6130 IGNORE is true if the result is not used.
6131 EXT_CALL is the correct builtin for an external call if this cannot be
6132 resolved to an instruction sequence. */
6134 static rtx
6135 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6136 enum rtx_code code, bool fetch_after,
6137 bool ignore, enum built_in_function ext_call)
6139 rtx val, mem, ret;
6140 enum memmodel model;
6141 tree fndecl;
6142 tree addr;
6144 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6146 /* Expand the operands. */
6147 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6148 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6150 /* Only try generating instructions if inlining is turned on. */
6151 if (flag_inline_atomics)
6153 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6154 if (ret)
6155 return ret;
6158 /* Return if a different routine isn't needed for the library call. */
6159 if (ext_call == BUILT_IN_NONE)
6160 return NULL_RTX;
6162 /* Change the call to the specified function. */
6163 fndecl = get_callee_fndecl (exp);
6164 addr = CALL_EXPR_FN (exp);
6165 STRIP_NOPS (addr);
6167 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6168 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6170 /* If we will emit code after the call, the call cannot be a tail call.
6171 If it is emitted as a tail call, a barrier is emitted after it, and
6172 then all trailing code is removed. */
6173 if (!ignore)
6174 CALL_EXPR_TAILCALL (exp) = 0;
6176 /* Expand the call here so we can emit trailing code. */
6177 ret = expand_call (exp, target, ignore);
6179 /* Replace the original function just in case it matters. */
6180 TREE_OPERAND (addr, 0) = fndecl;
6182 /* Then issue the arithmetic correction to return the right result. */
6183 if (!ignore)
6185 if (code == NOT)
6187 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6188 OPTAB_LIB_WIDEN);
6189 ret = expand_simple_unop (mode, NOT, ret, target, true);
6191 else
6192 ret = expand_simple_binop (mode, code, ret, val, target, true,
6193 OPTAB_LIB_WIDEN);
6195 return ret;
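/* Illustration of the arithmetic correction above: when, say,
   __atomic_add_fetch has to fall back to the __atomic_fetch_add library
   call, that call returns the value before the addition, so we re-apply
   "ret = ret + val"; for the NAND forms the fix-up is "ret = ~(ret & val)",
   matching the NOT handling above.  */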
6198 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6200 void
6201 expand_ifn_atomic_bit_test_and (gcall *call)
6203 tree ptr = gimple_call_arg (call, 0);
6204 tree bit = gimple_call_arg (call, 1);
6205 tree flag = gimple_call_arg (call, 2);
6206 tree lhs = gimple_call_lhs (call);
6207 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6208 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6209 enum rtx_code code;
6210 optab optab;
6211 class expand_operand ops[5];
6213 gcc_assert (flag_inline_atomics);
6215 if (gimple_call_num_args (call) == 4)
6216 model = get_memmodel (gimple_call_arg (call, 3));
6218 rtx mem = get_builtin_sync_mem (ptr, mode);
6219 rtx val = expand_expr_force_mode (bit, mode);
6221 switch (gimple_call_internal_fn (call))
6223 case IFN_ATOMIC_BIT_TEST_AND_SET:
6224 code = IOR;
6225 optab = atomic_bit_test_and_set_optab;
6226 break;
6227 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6228 code = XOR;
6229 optab = atomic_bit_test_and_complement_optab;
6230 break;
6231 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6232 code = AND;
6233 optab = atomic_bit_test_and_reset_optab;
6234 break;
6235 default:
6236 gcc_unreachable ();
6239 if (lhs == NULL_TREE)
6241 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6242 val, NULL_RTX, true, OPTAB_DIRECT);
6243 if (code == AND)
6244 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6245 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6246 return;
6249 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6250 enum insn_code icode = direct_optab_handler (optab, mode);
6251 gcc_assert (icode != CODE_FOR_nothing);
6252 create_output_operand (&ops[0], target, mode);
6253 create_fixed_operand (&ops[1], mem);
6254 create_convert_operand_to (&ops[2], val, mode, true);
6255 create_integer_operand (&ops[3], model);
6256 create_integer_operand (&ops[4], integer_onep (flag));
6257 if (maybe_expand_insn (icode, 5, ops))
6258 return;
6260 rtx bitval = val;
6261 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6262 val, NULL_RTX, true, OPTAB_DIRECT);
6263 rtx maskval = val;
6264 if (code == AND)
6265 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6266 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6267 code, model, false);
6268 if (integer_onep (flag))
6270 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6271 NULL_RTX, true, OPTAB_DIRECT);
6272 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6273 true, OPTAB_DIRECT);
6275 else
6276 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6277 OPTAB_DIRECT);
6278 if (result != target)
6279 emit_move_insn (target, result);
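/* Illustration of the fallback above: when no direct optab pattern matches,
   the result is rebuilt from a plain atomic fetch-op.  For
   IFN_ATOMIC_BIT_TEST_AND_SET with FLAG nonzero this amounts to

     old = __atomic_fetch_or (ptr, 1 << bit, model);
     lhs = (old >> bit) & 1;

   and with FLAG zero the last step is "lhs = old & (1 << bit)" instead.  */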
6282 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6284 void
6285 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6287 tree cmp = gimple_call_arg (call, 0);
6288 tree ptr = gimple_call_arg (call, 1);
6289 tree arg = gimple_call_arg (call, 2);
6290 tree lhs = gimple_call_lhs (call);
6291 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6292 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6293 optab optab;
6294 rtx_code code;
6295 class expand_operand ops[5];
6297 gcc_assert (flag_inline_atomics);
6299 if (gimple_call_num_args (call) == 4)
6300 model = get_memmodel (gimple_call_arg (call, 3));
6302 rtx mem = get_builtin_sync_mem (ptr, mode);
6303 rtx op = expand_expr_force_mode (arg, mode);
6305 switch (gimple_call_internal_fn (call))
6307 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6308 code = PLUS;
6309 optab = atomic_add_fetch_cmp_0_optab;
6310 break;
6311 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6312 code = MINUS;
6313 optab = atomic_sub_fetch_cmp_0_optab;
6314 break;
6315 case IFN_ATOMIC_AND_FETCH_CMP_0:
6316 code = AND;
6317 optab = atomic_and_fetch_cmp_0_optab;
6318 break;
6319 case IFN_ATOMIC_OR_FETCH_CMP_0:
6320 code = IOR;
6321 optab = atomic_or_fetch_cmp_0_optab;
6322 break;
6323 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6324 code = XOR;
6325 optab = atomic_xor_fetch_cmp_0_optab;
6326 break;
6327 default:
6328 gcc_unreachable ();
6331 enum rtx_code comp = UNKNOWN;
6332 switch (tree_to_uhwi (cmp))
6334 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6335 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6336 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6337 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6338 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6339 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6340 default: gcc_unreachable ();
6343 rtx target;
6344 if (lhs == NULL_TREE)
6345 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6346 else
6347 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6348 enum insn_code icode = direct_optab_handler (optab, mode);
6349 gcc_assert (icode != CODE_FOR_nothing);
6350 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6351 create_fixed_operand (&ops[1], mem);
6352 create_convert_operand_to (&ops[2], op, mode, true);
6353 create_integer_operand (&ops[3], model);
6354 create_integer_operand (&ops[4], comp);
6355 if (maybe_expand_insn (icode, 5, ops))
6356 return;
6358 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6359 code, model, true);
6360 if (lhs)
6362 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6363 0, 1);
6364 if (result != target)
6365 emit_move_insn (target, result);
6369 /* Expand an atomic clear operation.
6370    void __atomic_clear (BOOL *obj, enum memmodel)
6371 EXP is the call expression. */
6373 static rtx
6374 expand_builtin_atomic_clear (tree exp)
6376 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6377 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6378 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6380 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6381 model = MEMMODEL_SEQ_CST;
6383   /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
6384      Failing that, a plain store is issued.  The only way this can
6385 fail is if the bool type is larger than a word size. Unlikely, but
6386 handle it anyway for completeness. Assume a single threaded model since
6387 there is no atomic support in this case, and no barriers are required. */
6388 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6389 if (!ret)
6390 emit_move_insn (mem, const0_rtx);
6391 return const0_rtx;
6394 /* Expand an atomic test_and_set operation.
6395    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6396 EXP is the call expression. */
6398 static rtx
6399 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6401 rtx mem;
6402 enum memmodel model;
6403 machine_mode mode;
6405 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6406 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6407 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6409 return expand_atomic_test_and_set (target, mem, model);
6413 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6414 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6416 static tree
6417 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6419 int size;
6420 machine_mode mode;
6421 unsigned int mode_align, type_align;
6423 if (TREE_CODE (arg0) != INTEGER_CST)
6424 return NULL_TREE;
6426 /* We need a corresponding integer mode for the access to be lock-free. */
6427 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6428 if (!int_mode_for_size (size, 0).exists (&mode))
6429 return boolean_false_node;
6431 mode_align = GET_MODE_ALIGNMENT (mode);
6433 if (TREE_CODE (arg1) == INTEGER_CST)
6435 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6437 /* Either this argument is null, or it's a fake pointer encoding
6438 the alignment of the object. */
6439 val = least_bit_hwi (val);
6440 val *= BITS_PER_UNIT;
6442 if (val == 0 || mode_align < val)
6443 type_align = mode_align;
6444 else
6445 type_align = val;
6447 else
6449 tree ttype = TREE_TYPE (arg1);
6451 /* This function is usually invoked and folded immediately by the front
6452 end before anything else has a chance to look at it. The pointer
6453 parameter at this point is usually cast to a void *, so check for that
6454 and look past the cast. */
6455 if (CONVERT_EXPR_P (arg1)
6456 && POINTER_TYPE_P (ttype)
6457 && VOID_TYPE_P (TREE_TYPE (ttype))
6458 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6459 arg1 = TREE_OPERAND (arg1, 0);
6461 ttype = TREE_TYPE (arg1);
6462 gcc_assert (POINTER_TYPE_P (ttype));
6464 /* Get the underlying type of the object. */
6465 ttype = TREE_TYPE (ttype);
6466 type_align = TYPE_ALIGN (ttype);
6469 /* If the object has smaller alignment, the lock free routines cannot
6470 be used. */
6471 if (type_align < mode_align)
6472 return boolean_false_node;
6474 /* Check if a compare_and_swap pattern exists for the mode which represents
6475 the required size. The pattern is not allowed to fail, so the existence
6476 of the pattern indicates support is present. Also require that an
6477 atomic load exists for the required size. */
6478 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6479 return boolean_true_node;
6480 else
6481 return boolean_false_node;
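/* Illustration: __atomic_always_lock_free (8, 0) folds to true only when
   the target provides both a compare-and-swap and an atomic load pattern
   for the 64-bit integer mode; passing instead a pointer whose pointed-to
   type is less aligned than that mode makes this return false.  */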
6484 /* Return true if the parameters to call EXP represent an object which will
6485 always generate lock free instructions. The first argument represents the
6486 size of the object, and the second parameter is a pointer to the object
6487 itself. If NULL is passed for the object, then the result is based on
6488 typical alignment for an object of the specified size. Otherwise return
6489 false. */
6491 static rtx
6492 expand_builtin_atomic_always_lock_free (tree exp)
6494 tree size;
6495 tree arg0 = CALL_EXPR_ARG (exp, 0);
6496 tree arg1 = CALL_EXPR_ARG (exp, 1);
6498 if (TREE_CODE (arg0) != INTEGER_CST)
6500 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6501 return const0_rtx;
6504 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6505 if (size == boolean_true_node)
6506 return const1_rtx;
6507 return const0_rtx;
6510 /* Return a one or zero if it can be determined that object ARG1 of size ARG0
6511 is lock free on this architecture. */
6513 static tree
6514 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6516 if (!flag_inline_atomics)
6517 return NULL_TREE;
6519 /* If it isn't always lock free, don't generate a result. */
6520 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6521 return boolean_true_node;
6523 return NULL_TREE;
6526 /* Return true if the parameters to call EXP represent an object which will
6527 always generate lock free instructions. The first argument represents the
6528 size of the object, and the second parameter is a pointer to the object
6529 itself. If NULL is passed for the object, then the result is based on
6530 typical alignment for an object of the specified size. Otherwise return
6531    NULL.  */
6533 static rtx
6534 expand_builtin_atomic_is_lock_free (tree exp)
6536 tree size;
6537 tree arg0 = CALL_EXPR_ARG (exp, 0);
6538 tree arg1 = CALL_EXPR_ARG (exp, 1);
6540 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6542 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6543 return NULL_RTX;
6546 if (!flag_inline_atomics)
6547 return NULL_RTX;
6549 /* If the value is known at compile time, return the RTX for it. */
6550 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6551 if (size == boolean_true_node)
6552 return const1_rtx;
6554 return NULL_RTX;
6557 /* Expand the __atomic_thread_fence intrinsic:
6558 void __atomic_thread_fence (enum memmodel)
6559 EXP is the CALL_EXPR. */
6561 static void
6562 expand_builtin_atomic_thread_fence (tree exp)
6564 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6565 expand_mem_thread_fence (model);
6568 /* Expand the __atomic_signal_fence intrinsic:
6569 void __atomic_signal_fence (enum memmodel)
6570 EXP is the CALL_EXPR. */
6572 static void
6573 expand_builtin_atomic_signal_fence (tree exp)
6575 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6576 expand_mem_signal_fence (model);
6579 /* Expand the __sync_synchronize intrinsic. */
6581 static void
6582 expand_builtin_sync_synchronize (void)
6584 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6587 static rtx
6588 expand_builtin_thread_pointer (tree exp, rtx target)
6590 enum insn_code icode;
6591 if (!validate_arglist (exp, VOID_TYPE))
6592 return const0_rtx;
6593 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6594 if (icode != CODE_FOR_nothing)
6596 class expand_operand op;
6597       /* If the target is not suitable then create a new target.  */
6598 if (target == NULL_RTX
6599 || !REG_P (target)
6600 || GET_MODE (target) != Pmode)
6601 target = gen_reg_rtx (Pmode);
6602 create_output_operand (&op, target, Pmode);
6603 expand_insn (icode, 1, &op);
6604 return target;
6606 error ("%<__builtin_thread_pointer%> is not supported on this target");
6607 return const0_rtx;
6610 static void
6611 expand_builtin_set_thread_pointer (tree exp)
6613 enum insn_code icode;
6614 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6615 return;
6616 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6617 if (icode != CODE_FOR_nothing)
6619 class expand_operand op;
6620 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6621 Pmode, EXPAND_NORMAL);
6622 create_input_operand (&op, val, Pmode);
6623 expand_insn (icode, 1, &op);
6624 return;
6626 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6630 /* Emit code to restore the current value of the stack.  */
6632 static void
6633 expand_stack_restore (tree var)
6635 rtx_insn *prev;
6636 rtx sa = expand_normal (var);
6638 sa = convert_memory_address (Pmode, sa);
6640 prev = get_last_insn ();
6641 emit_stack_restore (SAVE_BLOCK, sa);
6643 record_new_stack_level ();
6645 fixup_args_size_notes (prev, get_last_insn (), 0);
6648 /* Emit code to save the current value of the stack.  */
6650 static rtx
6651 expand_stack_save (void)
6653 rtx ret = NULL_RTX;
6655 emit_stack_save (SAVE_BLOCK, &ret);
6656 return ret;
6659 /* Emit code to get the OpenACC gang, worker or vector id or size.  */
6661 static rtx
6662 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6664 const char *name;
6665 rtx fallback_retval;
6666 rtx_insn *(*gen_fn) (rtx, rtx);
6667 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6669 case BUILT_IN_GOACC_PARLEVEL_ID:
6670 name = "__builtin_goacc_parlevel_id";
6671 fallback_retval = const0_rtx;
6672 gen_fn = targetm.gen_oacc_dim_pos;
6673 break;
6674 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6675 name = "__builtin_goacc_parlevel_size";
6676 fallback_retval = const1_rtx;
6677 gen_fn = targetm.gen_oacc_dim_size;
6678 break;
6679 default:
6680 gcc_unreachable ();
6683 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6685 error ("%qs only supported in OpenACC code", name);
6686 return const0_rtx;
6689 tree arg = CALL_EXPR_ARG (exp, 0);
6690 if (TREE_CODE (arg) != INTEGER_CST)
6692 error ("non-constant argument 0 to %qs", name);
6693 return const0_rtx;
6696 int dim = TREE_INT_CST_LOW (arg);
6697 switch (dim)
6699 case GOMP_DIM_GANG:
6700 case GOMP_DIM_WORKER:
6701 case GOMP_DIM_VECTOR:
6702 break;
6703 default:
6704 error ("illegal argument 0 to %qs", name);
6705 return const0_rtx;
6708 if (ignore)
6709 return target;
6711 if (target == NULL_RTX)
6712 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6714 if (!targetm.have_oacc_dim_size ())
6716 emit_move_insn (target, fallback_retval);
6717 return target;
6720 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6721 emit_insn (gen_fn (reg, GEN_INT (dim)));
6722 if (reg != target)
6723 emit_move_insn (target, reg);
6725 return target;
6728 /* Expand a string compare operation using a sequence of char comparisons
6729 to get rid of the calling overhead, with result going to TARGET if
6730 that's convenient.
6732 VAR_STR is the variable string source;
6733 CONST_STR is the constant string source;
6734 LENGTH is the number of chars to compare;
6735 CONST_STR_N indicates which source string is the constant string;
6736 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6738    The call is expanded to (assuming const_str_n is 2, i.e., arg2 is the constant string):
6740 target = (int) (unsigned char) var_str[0]
6741 - (int) (unsigned char) const_str[0];
6742 if (target != 0)
6743 goto ne_label;
6745 target = (int) (unsigned char) var_str[length - 2]
6746 - (int) (unsigned char) const_str[length - 2];
6747 if (target != 0)
6748 goto ne_label;
6749 target = (int) (unsigned char) var_str[length - 1]
6750 - (int) (unsigned char) const_str[length - 1];
6751 ne_label:
6754 static rtx
6755 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6756 unsigned HOST_WIDE_INT length,
6757 int const_str_n, machine_mode mode)
6759 HOST_WIDE_INT offset = 0;
6760 rtx var_rtx_array
6761 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6762 rtx var_rtx = NULL_RTX;
6763 rtx const_rtx = NULL_RTX;
6764 rtx result = target ? target : gen_reg_rtx (mode);
6765 rtx_code_label *ne_label = gen_label_rtx ();
6766 tree unit_type_node = unsigned_char_type_node;
6767 scalar_int_mode unit_mode
6768 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6770 start_sequence ();
6772 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6774 var_rtx
6775 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6776 const_rtx = c_readstr (const_str + offset, unit_mode);
6777 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6778 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6780 op0 = convert_modes (mode, unit_mode, op0, 1);
6781 op1 = convert_modes (mode, unit_mode, op1, 1);
6782 result = expand_simple_binop (mode, MINUS, op0, op1,
6783 result, 1, OPTAB_WIDEN);
6784 if (i < length - 1)
6785 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6786 mode, true, ne_label);
6787 offset += GET_MODE_SIZE (unit_mode);
6790 emit_label (ne_label);
6791 rtx_insn *insns = get_insns ();
6792 end_sequence ();
6793 emit_insn (insns);
6795 return result;
6798 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
6799 to TARGET if that's convenient.
6800    If the call is not inlined, return NULL_RTX.  */
6802 static rtx
6803 inline_expand_builtin_bytecmp (tree exp, rtx target)
6805 tree fndecl = get_callee_fndecl (exp);
6806 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6807 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6809 /* Do NOT apply this inlining expansion when optimizing for size or
6810      when the optimization level is below 2.  */
6811 if (optimize < 2 || optimize_insn_for_size_p ())
6812 return NULL_RTX;
6814 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6815 || fcode == BUILT_IN_STRNCMP
6816 || fcode == BUILT_IN_MEMCMP);
6818   /* On a target where the type of the call (int) has the same or narrower precision
6819 than unsigned char, give up the inlining expansion. */
6820 if (TYPE_PRECISION (unsigned_char_type_node)
6821 >= TYPE_PRECISION (TREE_TYPE (exp)))
6822 return NULL_RTX;
6824 tree arg1 = CALL_EXPR_ARG (exp, 0);
6825 tree arg2 = CALL_EXPR_ARG (exp, 1);
6826 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6828 unsigned HOST_WIDE_INT len1 = 0;
6829 unsigned HOST_WIDE_INT len2 = 0;
6830 unsigned HOST_WIDE_INT len3 = 0;
6832 /* Get the object representation of the initializers of ARG1 and ARG2
6833 as strings, provided they refer to constant objects, with their byte
6834 sizes in LEN1 and LEN2, respectively. */
6835 const char *bytes1 = getbyterep (arg1, &len1);
6836 const char *bytes2 = getbyterep (arg2, &len2);
6838 /* Fail if neither argument refers to an initialized constant. */
6839 if (!bytes1 && !bytes2)
6840 return NULL_RTX;
6842 if (is_ncmp)
6844 /* Fail if the memcmp/strncmp bound is not a constant. */
6845 if (!tree_fits_uhwi_p (len3_tree))
6846 return NULL_RTX;
6848 len3 = tree_to_uhwi (len3_tree);
6850 if (fcode == BUILT_IN_MEMCMP)
6852 /* Fail if the memcmp bound is greater than the size of either
6853 of the two constant objects. */
6854 if ((bytes1 && len1 < len3)
6855 || (bytes2 && len2 < len3))
6856 return NULL_RTX;
6860 if (fcode != BUILT_IN_MEMCMP)
6862 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6863 and LEN2 to the length of the nul-terminated string stored
6864 in each. */
6865 if (bytes1 != NULL)
6866 len1 = strnlen (bytes1, len1) + 1;
6867 if (bytes2 != NULL)
6868 len2 = strnlen (bytes2, len2) + 1;
6871 /* See inline_string_cmp. */
6872 int const_str_n;
6873 if (!len1)
6874 const_str_n = 2;
6875 else if (!len2)
6876 const_str_n = 1;
6877 else if (len2 > len1)
6878 const_str_n = 1;
6879 else
6880 const_str_n = 2;
6882 /* For strncmp only, compute the new bound as the smallest of
6883 the lengths of the two strings (plus 1) and the bound provided
6884 to the function. */
6885 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6886 if (is_ncmp && len3 < bound)
6887 bound = len3;
6889 /* If the bound of the comparison is larger than the threshold,
6890 do nothing. */
6891 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6892 return NULL_RTX;
6894 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6896   /* Now expand the call inline.  */
6897 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6898 (const_str_n == 1) ? bytes1 : bytes2, bound,
6899 const_str_n, mode);
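/* Illustration (hypothetical call): for strcmp (s, "ab") the constant
   argument yields len2 == 3 including the terminating nul, so whenever the
   --param builtin-string-cmp-inline-length threshold is at least 3 the call
   is expanded into the per-byte comparison sequence sketched before
   inline_string_cmp; strncmp (s, "ab", 1) would use the smaller bound 1.  */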
6902 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6903 represents the size of the first argument to that call, or VOIDmode
6904 if the argument is a pointer. IGNORE will be true if the result
6905 isn't used. */
6906 static rtx
6907 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6908 bool ignore)
6910 rtx val, failsafe;
6911 unsigned nargs = call_expr_nargs (exp);
6913 tree arg0 = CALL_EXPR_ARG (exp, 0);
6915 if (mode == VOIDmode)
6917 mode = TYPE_MODE (TREE_TYPE (arg0));
6918 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6921 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6923 /* An optional second argument can be used as a failsafe value on
6924 some machines. If it isn't present, then the failsafe value is
6925 assumed to be 0. */
6926 if (nargs > 1)
6928 tree arg1 = CALL_EXPR_ARG (exp, 1);
6929 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6931 else
6932 failsafe = const0_rtx;
6934 /* If the result isn't used, the behavior is undefined. It would be
6935 nice to emit a warning here, but path splitting means this might
6936 happen with legitimate code. So simply drop the builtin
6937 expansion in that case; we've handled any side-effects above. */
6938 if (ignore)
6939 return const0_rtx;
6941 /* If we don't have a suitable target, create one to hold the result. */
6942 if (target == NULL || GET_MODE (target) != mode)
6943 target = gen_reg_rtx (mode);
6945 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6946 val = convert_modes (mode, VOIDmode, val, false);
6948 return targetm.speculation_safe_value (mode, target, val, failsafe);
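/* User-level illustration (assumed snippet):

     if (i < len)
       x = __builtin_speculation_safe_value (array[i]);

   The expansion above evaluates the argument, substitutes the optional
   failsafe value (zero by default) and lets targetm.speculation_safe_value
   emit whatever mitigation the target provides.  */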
6951 /* Expand an expression EXP that calls a built-in function,
6952 with result going to TARGET if that's convenient
6953 (and in mode MODE if that's convenient).
6954 SUBTARGET may be used as the target for computing one of EXP's operands.
6955 IGNORE is nonzero if the value is to be ignored. */
6958 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6959 int ignore)
6961 tree fndecl = get_callee_fndecl (exp);
6962 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6963 int flags;
6965 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6966 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6968 /* When ASan is enabled, we don't want to expand some memory/string
6969 builtins and rely on libsanitizer's hooks. This allows us to avoid
6970      redundant checks and be sure that a possible overflow will be detected
6971 by ASan. */
6973 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6974 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6975 return expand_call (exp, target, ignore);
6977 /* When not optimizing, generate calls to library functions for a certain
6978 set of builtins. */
6979 if (!optimize
6980 && !called_as_built_in (fndecl)
6981 && fcode != BUILT_IN_FORK
6982 && fcode != BUILT_IN_EXECL
6983 && fcode != BUILT_IN_EXECV
6984 && fcode != BUILT_IN_EXECLP
6985 && fcode != BUILT_IN_EXECLE
6986 && fcode != BUILT_IN_EXECVP
6987 && fcode != BUILT_IN_EXECVE
6988 && fcode != BUILT_IN_CLEAR_CACHE
6989 && !ALLOCA_FUNCTION_CODE_P (fcode)
6990 && fcode != BUILT_IN_FREE)
6991 return expand_call (exp, target, ignore);
6993 /* The built-in function expanders test for target == const0_rtx
6994 to determine whether the function's result will be ignored. */
6995 if (ignore)
6996 target = const0_rtx;
6998 /* If the result of a pure or const built-in function is ignored, and
6999 none of its arguments are volatile, we can avoid expanding the
7000 built-in call and just evaluate the arguments for side-effects. */
7001 if (target == const0_rtx
7002 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7003 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7005 bool volatilep = false;
7006 tree arg;
7007 call_expr_arg_iterator iter;
7009 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7010 if (TREE_THIS_VOLATILE (arg))
7012 volatilep = true;
7013 break;
7016 if (! volatilep)
7018 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7019 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7020 return const0_rtx;
7024 switch (fcode)
7026 CASE_FLT_FN (BUILT_IN_FABS):
7027 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7028 case BUILT_IN_FABSD32:
7029 case BUILT_IN_FABSD64:
7030 case BUILT_IN_FABSD128:
7031 target = expand_builtin_fabs (exp, target, subtarget);
7032 if (target)
7033 return target;
7034 break;
7036 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7037 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7038 target = expand_builtin_copysign (exp, target, subtarget);
7039 if (target)
7040 return target;
7041 break;
7043 /* Just do a normal library call if we were unable to fold
7044 the values. */
7045 CASE_FLT_FN (BUILT_IN_CABS):
7046 break;
7048 CASE_FLT_FN (BUILT_IN_FMA):
7049 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7050 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7051 if (target)
7052 return target;
7053 break;
7055 CASE_FLT_FN (BUILT_IN_ILOGB):
7056 if (! flag_unsafe_math_optimizations)
7057 break;
7058 gcc_fallthrough ();
7059 CASE_FLT_FN (BUILT_IN_ISINF):
7060 CASE_FLT_FN (BUILT_IN_FINITE):
7061 case BUILT_IN_ISFINITE:
7062 case BUILT_IN_ISNORMAL:
7063 target = expand_builtin_interclass_mathfn (exp, target);
7064 if (target)
7065 return target;
7066 break;
7068 CASE_FLT_FN (BUILT_IN_ICEIL):
7069 CASE_FLT_FN (BUILT_IN_LCEIL):
7070 CASE_FLT_FN (BUILT_IN_LLCEIL):
7071 CASE_FLT_FN (BUILT_IN_LFLOOR):
7072 CASE_FLT_FN (BUILT_IN_IFLOOR):
7073 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7074 target = expand_builtin_int_roundingfn (exp, target);
7075 if (target)
7076 return target;
7077 break;
7079 CASE_FLT_FN (BUILT_IN_IRINT):
7080 CASE_FLT_FN (BUILT_IN_LRINT):
7081 CASE_FLT_FN (BUILT_IN_LLRINT):
7082 CASE_FLT_FN (BUILT_IN_IROUND):
7083 CASE_FLT_FN (BUILT_IN_LROUND):
7084 CASE_FLT_FN (BUILT_IN_LLROUND):
7085 target = expand_builtin_int_roundingfn_2 (exp, target);
7086 if (target)
7087 return target;
7088 break;
7090 CASE_FLT_FN (BUILT_IN_POWI):
7091 target = expand_builtin_powi (exp, target);
7092 if (target)
7093 return target;
7094 break;
7096 CASE_FLT_FN (BUILT_IN_CEXPI):
7097 target = expand_builtin_cexpi (exp, target);
7098 gcc_assert (target);
7099 return target;
7101 CASE_FLT_FN (BUILT_IN_SIN):
7102 CASE_FLT_FN (BUILT_IN_COS):
7103 if (! flag_unsafe_math_optimizations)
7104 break;
7105 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7106 if (target)
7107 return target;
7108 break;
7110 CASE_FLT_FN (BUILT_IN_SINCOS):
7111 if (! flag_unsafe_math_optimizations)
7112 break;
7113 target = expand_builtin_sincos (exp);
7114 if (target)
7115 return target;
7116 break;
7118 case BUILT_IN_FEGETROUND:
7119 target = expand_builtin_fegetround (exp, target, target_mode);
7120 if (target)
7121 return target;
7122 break;
7124 case BUILT_IN_FECLEAREXCEPT:
7125 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7126 feclearexcept_optab);
7127 if (target)
7128 return target;
7129 break;
7131 case BUILT_IN_FERAISEEXCEPT:
7132 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7133 feraiseexcept_optab);
7134 if (target)
7135 return target;
7136 break;
7138 case BUILT_IN_APPLY_ARGS:
7139 return expand_builtin_apply_args ();
7141 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7142 FUNCTION with a copy of the parameters described by
7143 ARGUMENTS, and ARGSIZE. It returns a block of memory
7144 allocated on the stack into which are stored all the registers
7145 that might possibly be used for returning the result of a
7146 function. ARGUMENTS is the value returned by
7147 __builtin_apply_args. ARGSIZE is the number of bytes of
7148 arguments that must be copied. ??? How should this value be
7149 computed? We'll also need a safe worst case value for varargs
7150 functions. */
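/* A minimal user-level sketch of the mechanism described above (illustrative
   only, not part of this file): a wrapper that forwards both its incoming
   arguments and the callee's return value.

     extern void target_fn ();

     void wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply (target_fn, args, 64);
       __builtin_return (ret);
     }

   TARGET_FN and the 64-byte ARGSIZE are assumptions for the sketch; as the
   ??? note above says, there is no portable way to compute ARGSIZE.  */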
7151 case BUILT_IN_APPLY:
7152 if (!validate_arglist (exp, POINTER_TYPE,
7153 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7154 && !validate_arglist (exp, REFERENCE_TYPE,
7155 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7156 return const0_rtx;
7157 else
7159 rtx ops[3];
7161 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7162 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7163 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7165 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7168 /* __builtin_return (RESULT) causes the function to return the
7169 value described by RESULT. RESULT is the address of the block of
7170 memory returned by __builtin_apply. */
7171 case BUILT_IN_RETURN:
7172 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7173 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7174 return const0_rtx;
7176 case BUILT_IN_SAVEREGS:
7177 return expand_builtin_saveregs ();
7179 case BUILT_IN_VA_ARG_PACK:
7180 /* All valid uses of __builtin_va_arg_pack () are removed during
7181 inlining. */
7182 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7183 return const0_rtx;
7185 case BUILT_IN_VA_ARG_PACK_LEN:
7186 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7187 inlining. */
7188 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7189 return const0_rtx;
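/* Illustrative sketch of the one valid use of these builtins (the names are
   assumptions, not from this file): forwarding the variable arguments of an
   always_inline wrapper, e.g.

     extern int my_log (const char *fmt, ...);

     static inline __attribute__ ((always_inline)) int
     log_checked (const char *fmt, ...)
     {
       return my_log (fmt, __builtin_va_arg_pack ());
     }

   Every such use is substituted while inlining the wrapper; a call that
   survives to RTL expansion, as handled here, is therefore invalid.  */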
7191 /* Return the address of the first anonymous stack arg. */
7192 case BUILT_IN_NEXT_ARG:
7193 if (fold_builtin_next_arg (exp, false))
7194 return const0_rtx;
7195 return expand_builtin_next_arg ();
7197 case BUILT_IN_CLEAR_CACHE:
7198 expand_builtin___clear_cache (exp);
7199 return const0_rtx;
7201 case BUILT_IN_CLASSIFY_TYPE:
7202 return expand_builtin_classify_type (exp);
7204 case BUILT_IN_CONSTANT_P:
7205 return const0_rtx;
7207 case BUILT_IN_FRAME_ADDRESS:
7208 case BUILT_IN_RETURN_ADDRESS:
7209 return expand_builtin_frame_address (fndecl, exp);
7211 /* Returns the address of the area where the structure is returned.
7212 0 otherwise. */
7213 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7214 if (call_expr_nargs (exp) != 0
7215 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7216 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7217 return const0_rtx;
7218 else
7219 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7221 CASE_BUILT_IN_ALLOCA:
7222 target = expand_builtin_alloca (exp);
7223 if (target)
7224 return target;
7225 break;
7227 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7228 return expand_asan_emit_allocas_unpoison (exp);
7230 case BUILT_IN_STACK_SAVE:
7231 return expand_stack_save ();
7233 case BUILT_IN_STACK_RESTORE:
7234 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7235 return const0_rtx;
7237 case BUILT_IN_BSWAP16:
7238 case BUILT_IN_BSWAP32:
7239 case BUILT_IN_BSWAP64:
7240 case BUILT_IN_BSWAP128:
7241 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7242 if (target)
7243 return target;
7244 break;
7246 CASE_INT_FN (BUILT_IN_FFS):
7247 target = expand_builtin_unop (target_mode, exp, target,
7248 subtarget, ffs_optab);
7249 if (target)
7250 return target;
7251 break;
7253 CASE_INT_FN (BUILT_IN_CLZ):
7254 target = expand_builtin_unop (target_mode, exp, target,
7255 subtarget, clz_optab);
7256 if (target)
7257 return target;
7258 break;
7260 CASE_INT_FN (BUILT_IN_CTZ):
7261 target = expand_builtin_unop (target_mode, exp, target,
7262 subtarget, ctz_optab);
7263 if (target)
7264 return target;
7265 break;
7267 CASE_INT_FN (BUILT_IN_CLRSB):
7268 target = expand_builtin_unop (target_mode, exp, target,
7269 subtarget, clrsb_optab);
7270 if (target)
7271 return target;
7272 break;
7274 CASE_INT_FN (BUILT_IN_POPCOUNT):
7275 target = expand_builtin_unop (target_mode, exp, target,
7276 subtarget, popcount_optab);
7277 if (target)
7278 return target;
7279 break;
7281 CASE_INT_FN (BUILT_IN_PARITY):
7282 target = expand_builtin_unop (target_mode, exp, target,
7283 subtarget, parity_optab);
7284 if (target)
7285 return target;
7286 break;
7288 case BUILT_IN_STRLEN:
7289 target = expand_builtin_strlen (exp, target, target_mode);
7290 if (target)
7291 return target;
7292 break;
7294 case BUILT_IN_STRNLEN:
7295 target = expand_builtin_strnlen (exp, target, target_mode);
7296 if (target)
7297 return target;
7298 break;
7300 case BUILT_IN_STRCPY:
7301 target = expand_builtin_strcpy (exp, target);
7302 if (target)
7303 return target;
7304 break;
7306 case BUILT_IN_STRNCPY:
7307 target = expand_builtin_strncpy (exp, target);
7308 if (target)
7309 return target;
7310 break;
7312 case BUILT_IN_STPCPY:
7313 target = expand_builtin_stpcpy (exp, target, mode);
7314 if (target)
7315 return target;
7316 break;
7318 case BUILT_IN_MEMCPY:
7319 target = expand_builtin_memcpy (exp, target);
7320 if (target)
7321 return target;
7322 break;
7324 case BUILT_IN_MEMMOVE:
7325 target = expand_builtin_memmove (exp, target);
7326 if (target)
7327 return target;
7328 break;
7330 case BUILT_IN_MEMPCPY:
7331 target = expand_builtin_mempcpy (exp, target);
7332 if (target)
7333 return target;
7334 break;
7336 case BUILT_IN_MEMSET:
7337 target = expand_builtin_memset (exp, target, mode);
7338 if (target)
7339 return target;
7340 break;
7342 case BUILT_IN_BZERO:
7343 target = expand_builtin_bzero (exp);
7344 if (target)
7345 return target;
7346 break;
7348 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7349 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7350 when changing it to a strcmp call. */
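/* Rough source-level picture (illustrative): a comparison that only tests
   equality, e.g.

     if (__builtin_strcmp (s, "abcd") == 0)

   may have been converted by earlier passes into the three-argument
   BUILT_IN_STRCMP_EQ form that carries the known length.  If the memcmp-style
   expansion below fails, the call is rebuilt with only the two original
   arguments so it can be emitted as a plain strcmp library call.  */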
7351 case BUILT_IN_STRCMP_EQ:
7352 target = expand_builtin_memcmp (exp, target, true);
7353 if (target)
7354 return target;
7356 /* Change this call back to a BUILT_IN_STRCMP. */
7357 TREE_OPERAND (exp, 1)
7358 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7360 /* Delete the last parameter. */
7361 unsigned int i;
7362 vec<tree, va_gc> *arg_vec;
7363 vec_alloc (arg_vec, 2);
7364 for (i = 0; i < 2; i++)
7365 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7366 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7367 /* FALLTHROUGH */
7369 case BUILT_IN_STRCMP:
7370 target = expand_builtin_strcmp (exp, target);
7371 if (target)
7372 return target;
7373 break;
7375 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7376 back to a BUILT_IN_STRNCMP. */
7377 case BUILT_IN_STRNCMP_EQ:
7378 target = expand_builtin_memcmp (exp, target, true);
7379 if (target)
7380 return target;
7382 /* Change it back to a BUILT_IN_STRNCMP. */
7383 TREE_OPERAND (exp, 1)
7384 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7385 /* FALLTHROUGH */
7387 case BUILT_IN_STRNCMP:
7388 target = expand_builtin_strncmp (exp, target, mode);
7389 if (target)
7390 return target;
7391 break;
7393 case BUILT_IN_BCMP:
7394 case BUILT_IN_MEMCMP:
7395 case BUILT_IN_MEMCMP_EQ:
7396 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7397 if (target)
7398 return target;
7399 if (fcode == BUILT_IN_MEMCMP_EQ)
7401 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7402 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7404 break;
7406 case BUILT_IN_SETJMP:
7407 /* This should have been lowered to the builtins below. */
7408 gcc_unreachable ();
7410 case BUILT_IN_SETJMP_SETUP:
7411 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7412 and the receiver label. */
7413 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7415 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7416 VOIDmode, EXPAND_NORMAL);
7417 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7418 rtx_insn *label_r = label_rtx (label);
7420 /* This is copied from the handling of non-local gotos. */
7421 expand_builtin_setjmp_setup (buf_addr, label_r);
7422 nonlocal_goto_handler_labels
7423 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7424 nonlocal_goto_handler_labels);
7425 /* ??? Do not let expand_label treat us as such since we would
7426 not want to be both on the list of non-local labels and on
7427 the list of forced labels. */
7428 FORCED_LABEL (label) = 0;
7429 return const0_rtx;
7431 break;
7433 case BUILT_IN_SETJMP_RECEIVER:
7434 /* __builtin_setjmp_receiver is passed the receiver label. */
7435 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7437 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7438 rtx_insn *label_r = label_rtx (label);
7440 expand_builtin_setjmp_receiver (label_r);
7441 return const0_rtx;
7443 break;
7445 /* __builtin_longjmp is passed a pointer to an array of five words.
7446 It's similar to the C library longjmp function but works with
7447 __builtin_setjmp above. */
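/* Hedged usage sketch (illustrative; these builtins are mainly for internal
   use, e.g. by the SJLJ exception machinery):

     intptr_t buf[5];          five words, as required above

     if (__builtin_setjmp (buf) == 0)
       normal_path ();
     else
       resumed ();             reached via __builtin_longjmp (buf, 1)

   Unlike the C library longjmp, the second argument of __builtin_longjmp must
   be the constant 1, which is exactly what the check below enforces.  */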
7448 case BUILT_IN_LONGJMP:
7449 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7451 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7452 VOIDmode, EXPAND_NORMAL);
7453 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7455 if (value != const1_rtx)
7457 error ("%<__builtin_longjmp%> second argument must be 1");
7458 return const0_rtx;
7461 expand_builtin_longjmp (buf_addr, value);
7462 return const0_rtx;
7464 break;
7466 case BUILT_IN_NONLOCAL_GOTO:
7467 target = expand_builtin_nonlocal_goto (exp);
7468 if (target)
7469 return target;
7470 break;
7472 /* This updates the setjmp buffer that is its argument with the value
7473 of the current stack pointer. */
7474 case BUILT_IN_UPDATE_SETJMP_BUF:
7475 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7477 rtx buf_addr
7478 = expand_normal (CALL_EXPR_ARG (exp, 0));
7480 expand_builtin_update_setjmp_buf (buf_addr);
7481 return const0_rtx;
7483 break;
7485 case BUILT_IN_TRAP:
7486 expand_builtin_trap ();
7487 return const0_rtx;
7489 case BUILT_IN_UNREACHABLE:
7490 expand_builtin_unreachable ();
7491 return const0_rtx;
7493 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7494 case BUILT_IN_SIGNBITD32:
7495 case BUILT_IN_SIGNBITD64:
7496 case BUILT_IN_SIGNBITD128:
7497 target = expand_builtin_signbit (exp, target);
7498 if (target)
7499 return target;
7500 break;
7502 /* Various hooks for the DWARF 2 __throw routine. */
7503 case BUILT_IN_UNWIND_INIT:
7504 expand_builtin_unwind_init ();
7505 return const0_rtx;
7506 case BUILT_IN_DWARF_CFA:
7507 return virtual_cfa_rtx;
7508 #ifdef DWARF2_UNWIND_INFO
7509 case BUILT_IN_DWARF_SP_COLUMN:
7510 return expand_builtin_dwarf_sp_column ();
7511 case BUILT_IN_INIT_DWARF_REG_SIZES:
7512 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7513 return const0_rtx;
7514 #endif
7515 case BUILT_IN_FROB_RETURN_ADDR:
7516 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7517 case BUILT_IN_EXTRACT_RETURN_ADDR:
7518 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7519 case BUILT_IN_EH_RETURN:
7520 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7521 CALL_EXPR_ARG (exp, 1));
7522 return const0_rtx;
7523 case BUILT_IN_EH_RETURN_DATA_REGNO:
7524 return expand_builtin_eh_return_data_regno (exp);
7525 case BUILT_IN_EXTEND_POINTER:
7526 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7527 case BUILT_IN_EH_POINTER:
7528 return expand_builtin_eh_pointer (exp);
7529 case BUILT_IN_EH_FILTER:
7530 return expand_builtin_eh_filter (exp);
7531 case BUILT_IN_EH_COPY_VALUES:
7532 return expand_builtin_eh_copy_values (exp);
7534 case BUILT_IN_VA_START:
7535 return expand_builtin_va_start (exp);
7536 case BUILT_IN_VA_END:
7537 return expand_builtin_va_end (exp);
7538 case BUILT_IN_VA_COPY:
7539 return expand_builtin_va_copy (exp);
7540 case BUILT_IN_EXPECT:
7541 return expand_builtin_expect (exp, target);
7542 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7543 return expand_builtin_expect_with_probability (exp, target);
7544 case BUILT_IN_ASSUME_ALIGNED:
7545 return expand_builtin_assume_aligned (exp, target);
7546 case BUILT_IN_PREFETCH:
7547 expand_builtin_prefetch (exp);
7548 return const0_rtx;
7550 case BUILT_IN_INIT_TRAMPOLINE:
7551 return expand_builtin_init_trampoline (exp, true);
7552 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7553 return expand_builtin_init_trampoline (exp, false);
7554 case BUILT_IN_ADJUST_TRAMPOLINE:
7555 return expand_builtin_adjust_trampoline (exp);
7557 case BUILT_IN_INIT_DESCRIPTOR:
7558 return expand_builtin_init_descriptor (exp);
7559 case BUILT_IN_ADJUST_DESCRIPTOR:
7560 return expand_builtin_adjust_descriptor (exp);
7562 case BUILT_IN_FORK:
7563 case BUILT_IN_EXECL:
7564 case BUILT_IN_EXECV:
7565 case BUILT_IN_EXECLP:
7566 case BUILT_IN_EXECLE:
7567 case BUILT_IN_EXECVP:
7568 case BUILT_IN_EXECVE:
7569 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7570 if (target)
7571 return target;
7572 break;
7574 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7575 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7576 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7577 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7578 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7579 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7580 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7581 if (target)
7582 return target;
7583 break;
7585 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7586 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7587 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7588 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7589 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7591 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7592 if (target)
7593 return target;
7594 break;
7596 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7597 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7598 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7599 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7600 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7602 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7603 if (target)
7604 return target;
7605 break;
7607 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7608 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7609 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7610 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7611 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7612 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7613 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7614 if (target)
7615 return target;
7616 break;
7618 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7619 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7620 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7621 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7622 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7623 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7624 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7625 if (target)
7626 return target;
7627 break;
7629 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7630 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7631 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7632 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7633 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7634 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7635 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7636 if (target)
7637 return target;
7638 break;
7640 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7641 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7642 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7643 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7644 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7645 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7646 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7647 if (target)
7648 return target;
7649 break;
7651 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7652 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7653 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7654 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7655 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7656 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7657 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7658 if (target)
7659 return target;
7660 break;
7662 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7663 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7664 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7665 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7666 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7667 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7668 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7669 if (target)
7670 return target;
7671 break;
7673 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7674 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7675 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7676 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7677 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7678 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7679 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7680 if (target)
7681 return target;
7682 break;
7684 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7685 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7686 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7687 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7688 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7689 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7690 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7691 if (target)
7692 return target;
7693 break;
7695 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7696 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7697 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7698 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7699 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7700 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7701 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7702 if (target)
7703 return target;
7704 break;
7706 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7707 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7708 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7709 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7710 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7711 if (mode == VOIDmode)
7712 mode = TYPE_MODE (boolean_type_node);
7713 if (!target || !register_operand (target, mode))
7714 target = gen_reg_rtx (mode);
7716 mode = get_builtin_sync_mode
7717 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7718 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7724 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7725 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7726 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7727 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7728 mode = get_builtin_sync_mode
7729 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7730 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7731 if (target)
7732 return target;
7733 break;
7735 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7736 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7737 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7738 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7739 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7740 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7741 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7742 if (target)
7743 return target;
7744 break;
7746 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7747 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7748 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7749 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7750 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7751 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7752 expand_builtin_sync_lock_release (mode, exp);
7753 return const0_rtx;
7755 case BUILT_IN_SYNC_SYNCHRONIZE:
7756 expand_builtin_sync_synchronize ();
7757 return const0_rtx;
7759 case BUILT_IN_ATOMIC_EXCHANGE_1:
7760 case BUILT_IN_ATOMIC_EXCHANGE_2:
7761 case BUILT_IN_ATOMIC_EXCHANGE_4:
7762 case BUILT_IN_ATOMIC_EXCHANGE_8:
7763 case BUILT_IN_ATOMIC_EXCHANGE_16:
7764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7765 target = expand_builtin_atomic_exchange (mode, exp, target);
7766 if (target)
7767 return target;
7768 break;
7770 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7771 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7772 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7773 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7774 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7776 unsigned int nargs, z;
7777 vec<tree, va_gc> *vec;
7779 mode =
7780 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7781 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7782 if (target)
7783 return target;
7785 /* If this is turned into an external library call, the weak parameter
7786 must be dropped to match the expected parameter list. */
7787 nargs = call_expr_nargs (exp);
7788 vec_alloc (vec, nargs - 1);
7789 for (z = 0; z < 3; z++)
7790 vec->quick_push (CALL_EXPR_ARG (exp, z));
7791 /* Skip the boolean weak parameter. */
7792 for (z = 4; z < 6; z++)
7793 vec->quick_push (CALL_EXPR_ARG (exp, z));
7794 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7795 break;
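/* Sketch of the argument adjustment above (illustrative; the argument names
   are assumptions): the builtin form carries a boolean WEAK flag that the
   external library entry point does not, so a surviving call such as

     __atomic_compare_exchange_4 (ptr, expected, desired,
                                  weak, success_order, failure_order)

   is rebuilt without the fourth argument before being emitted as

     __atomic_compare_exchange_4 (ptr, expected, desired,
                                  success_order, failure_order)  */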
7798 case BUILT_IN_ATOMIC_LOAD_1:
7799 case BUILT_IN_ATOMIC_LOAD_2:
7800 case BUILT_IN_ATOMIC_LOAD_4:
7801 case BUILT_IN_ATOMIC_LOAD_8:
7802 case BUILT_IN_ATOMIC_LOAD_16:
7803 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7804 target = expand_builtin_atomic_load (mode, exp, target);
7805 if (target)
7806 return target;
7807 break;
7809 case BUILT_IN_ATOMIC_STORE_1:
7810 case BUILT_IN_ATOMIC_STORE_2:
7811 case BUILT_IN_ATOMIC_STORE_4:
7812 case BUILT_IN_ATOMIC_STORE_8:
7813 case BUILT_IN_ATOMIC_STORE_16:
7814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7815 target = expand_builtin_atomic_store (mode, exp);
7816 if (target)
7817 return const0_rtx;
7818 break;
7820 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7821 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7822 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7823 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7824 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7826 enum built_in_function lib;
7827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7828 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7829 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7830 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7831 ignore, lib);
7832 if (target)
7833 return target;
7834 break;
7836 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7837 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7838 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7839 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7840 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7842 enum built_in_function lib;
7843 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7844 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7845 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7846 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7847 ignore, lib);
7848 if (target)
7849 return target;
7850 break;
7852 case BUILT_IN_ATOMIC_AND_FETCH_1:
7853 case BUILT_IN_ATOMIC_AND_FETCH_2:
7854 case BUILT_IN_ATOMIC_AND_FETCH_4:
7855 case BUILT_IN_ATOMIC_AND_FETCH_8:
7856 case BUILT_IN_ATOMIC_AND_FETCH_16:
7858 enum built_in_function lib;
7859 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7860 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7861 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7862 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7863 ignore, lib);
7864 if (target)
7865 return target;
7866 break;
7868 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7869 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7870 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7871 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7872 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7874 enum built_in_function lib;
7875 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7876 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7877 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7878 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7879 ignore, lib);
7880 if (target)
7881 return target;
7882 break;
7884 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7885 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7886 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7887 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7888 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7890 enum built_in_function lib;
7891 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7892 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7893 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7894 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7895 ignore, lib);
7896 if (target)
7897 return target;
7898 break;
7900 case BUILT_IN_ATOMIC_OR_FETCH_1:
7901 case BUILT_IN_ATOMIC_OR_FETCH_2:
7902 case BUILT_IN_ATOMIC_OR_FETCH_4:
7903 case BUILT_IN_ATOMIC_OR_FETCH_8:
7904 case BUILT_IN_ATOMIC_OR_FETCH_16:
7906 enum built_in_function lib;
7907 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7908 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7909 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7910 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7911 ignore, lib);
7912 if (target)
7913 return target;
7914 break;
7916 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7917 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7918 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7919 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7920 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7921 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7922 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7923 ignore, BUILT_IN_NONE);
7924 if (target)
7925 return target;
7926 break;
7928 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7929 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7930 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7931 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7932 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7933 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7934 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7935 ignore, BUILT_IN_NONE);
7936 if (target)
7937 return target;
7938 break;
7940 case BUILT_IN_ATOMIC_FETCH_AND_1:
7941 case BUILT_IN_ATOMIC_FETCH_AND_2:
7942 case BUILT_IN_ATOMIC_FETCH_AND_4:
7943 case BUILT_IN_ATOMIC_FETCH_AND_8:
7944 case BUILT_IN_ATOMIC_FETCH_AND_16:
7945 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7946 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7947 ignore, BUILT_IN_NONE);
7948 if (target)
7949 return target;
7950 break;
7952 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7953 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7954 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7955 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7956 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7957 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7958 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7959 ignore, BUILT_IN_NONE);
7960 if (target)
7961 return target;
7962 break;
7964 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7965 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7966 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7967 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7968 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7969 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7970 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7971 ignore, BUILT_IN_NONE);
7972 if (target)
7973 return target;
7974 break;
7976 case BUILT_IN_ATOMIC_FETCH_OR_1:
7977 case BUILT_IN_ATOMIC_FETCH_OR_2:
7978 case BUILT_IN_ATOMIC_FETCH_OR_4:
7979 case BUILT_IN_ATOMIC_FETCH_OR_8:
7980 case BUILT_IN_ATOMIC_FETCH_OR_16:
7981 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7982 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7983 ignore, BUILT_IN_NONE);
7984 if (target)
7985 return target;
7986 break;
7988 case BUILT_IN_ATOMIC_TEST_AND_SET:
7989 return expand_builtin_atomic_test_and_set (exp, target);
7991 case BUILT_IN_ATOMIC_CLEAR:
7992 return expand_builtin_atomic_clear (exp);
7994 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7995 return expand_builtin_atomic_always_lock_free (exp);
7997 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7998 target = expand_builtin_atomic_is_lock_free (exp);
7999 if (target)
8000 return target;
8001 break;
8003 case BUILT_IN_ATOMIC_THREAD_FENCE:
8004 expand_builtin_atomic_thread_fence (exp);
8005 return const0_rtx;
8007 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8008 expand_builtin_atomic_signal_fence (exp);
8009 return const0_rtx;
8011 case BUILT_IN_OBJECT_SIZE:
8012 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8013 return expand_builtin_object_size (exp);
8015 case BUILT_IN_MEMCPY_CHK:
8016 case BUILT_IN_MEMPCPY_CHK:
8017 case BUILT_IN_MEMMOVE_CHK:
8018 case BUILT_IN_MEMSET_CHK:
8019 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8020 if (target)
8021 return target;
8022 break;
8024 case BUILT_IN_STRCPY_CHK:
8025 case BUILT_IN_STPCPY_CHK:
8026 case BUILT_IN_STRNCPY_CHK:
8027 case BUILT_IN_STPNCPY_CHK:
8028 case BUILT_IN_STRCAT_CHK:
8029 case BUILT_IN_STRNCAT_CHK:
8030 case BUILT_IN_SNPRINTF_CHK:
8031 case BUILT_IN_VSNPRINTF_CHK:
8032 maybe_emit_chk_warning (exp, fcode);
8033 break;
8035 case BUILT_IN_SPRINTF_CHK:
8036 case BUILT_IN_VSPRINTF_CHK:
8037 maybe_emit_sprintf_chk_warning (exp, fcode);
8038 break;
8040 case BUILT_IN_THREAD_POINTER:
8041 return expand_builtin_thread_pointer (exp, target);
8043 case BUILT_IN_SET_THREAD_POINTER:
8044 expand_builtin_set_thread_pointer (exp);
8045 return const0_rtx;
8047 case BUILT_IN_ACC_ON_DEVICE:
8048 /* Do library call, if we failed to expand the builtin when
8049 folding. */
8050 break;
8052 case BUILT_IN_GOACC_PARLEVEL_ID:
8053 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8054 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8056 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8057 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8059 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8060 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8061 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8062 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8063 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8064 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8065 return expand_speculation_safe_value (mode, exp, target, ignore);
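/* Usage sketch (illustrative, with assumed variable names): on targets that
   implement the hook, code such as

     if (untrusted_index < bound)
       val = array[__builtin_speculation_safe_value (untrusted_index)];

   yields the real index on the architecturally executed path, but forces the
   failsafe value (zero, or the optional second argument) while the processor
   is executing along a mis-speculated path.  */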
8067 default: /* just do library call, if unknown builtin */
8068 break;
8071 /* The switch statement above can drop through to cause the function
8072 to be called normally. */
8073 return expand_call (exp, target, ignore);
8076 /* Determine whether a tree node represents a call to a built-in
8077 function. If the tree T is a call to a built-in function with
8078 the right number of arguments of the appropriate types, return
8079 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8080 Otherwise the return value is END_BUILTINS. */
8082 enum built_in_function
8083 builtin_mathfn_code (const_tree t)
8085 const_tree fndecl, arg, parmlist;
8086 const_tree argtype, parmtype;
8087 const_call_expr_arg_iterator iter;
8089 if (TREE_CODE (t) != CALL_EXPR)
8090 return END_BUILTINS;
8092 fndecl = get_callee_fndecl (t);
8093 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8094 return END_BUILTINS;
8096 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8097 init_const_call_expr_arg_iterator (t, &iter);
8098 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8100 /* If a function doesn't take a variable number of arguments,
8101 the last element in the list will have type `void'. */
8102 parmtype = TREE_VALUE (parmlist);
8103 if (VOID_TYPE_P (parmtype))
8105 if (more_const_call_expr_args_p (&iter))
8106 return END_BUILTINS;
8107 return DECL_FUNCTION_CODE (fndecl);
8110 if (! more_const_call_expr_args_p (&iter))
8111 return END_BUILTINS;
8113 arg = next_const_call_expr_arg (&iter);
8114 argtype = TREE_TYPE (arg);
8116 if (SCALAR_FLOAT_TYPE_P (parmtype))
8118 if (! SCALAR_FLOAT_TYPE_P (argtype))
8119 return END_BUILTINS;
8121 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8123 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8124 return END_BUILTINS;
8126 else if (POINTER_TYPE_P (parmtype))
8128 if (! POINTER_TYPE_P (argtype))
8129 return END_BUILTINS;
8131 else if (INTEGRAL_TYPE_P (parmtype))
8133 if (! INTEGRAL_TYPE_P (argtype))
8134 return END_BUILTINS;
8136 else
8137 return END_BUILTINS;
8140 /* Variable-length argument list. */
8141 return DECL_FUNCTION_CODE (fndecl);
8144 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8145 evaluate to a constant. */
8147 static tree
8148 fold_builtin_constant_p (tree arg)
8150 /* We return 1 for a numeric type that's known to be a constant
8151 value at compile-time or for an aggregate type that's a
8152 literal constant. */
8153 STRIP_NOPS (arg);
8155 /* If we know this is a constant, return the constant one. */
8156 if (CONSTANT_CLASS_P (arg)
8157 || (TREE_CODE (arg) == CONSTRUCTOR
8158 && TREE_CONSTANT (arg)))
8159 return integer_one_node;
8160 if (TREE_CODE (arg) == ADDR_EXPR)
8162 tree op = TREE_OPERAND (arg, 0);
8163 if (TREE_CODE (op) == STRING_CST
8164 || (TREE_CODE (op) == ARRAY_REF
8165 && integer_zerop (TREE_OPERAND (op, 1))
8166 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8167 return integer_one_node;
8170 /* If this expression has side effects, show we don't know it to be a
8171 constant. Likewise if it's a pointer or aggregate type since in
8172 those cases we only want literals, since those are only optimized
8173 when generating RTL, not later.
8174 And finally, if we are compiling an initializer, not code, we
8175 need to return a definite result now; there's not going to be any
8176 more optimization done. */
8177 if (TREE_SIDE_EFFECTS (arg)
8178 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8179 || POINTER_TYPE_P (TREE_TYPE (arg))
8180 || cfun == 0
8181 || folding_initializer
8182 || force_folding_builtin_constant_p)
8183 return integer_zero_node;
8185 return NULL_TREE;
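/* A few illustrative data points for the folding above (X is an assumed
   plain integer variable):

     __builtin_constant_p (42)      folds to 1 (a constant-class node)
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
     __builtin_constant_p (x++)     folds to 0 (the argument has side effects)
     __builtin_constant_p (x)       stays unfolded (NULL_TREE) in the hope
                                    that later optimization proves X constant,
                                    unless we are folding an initializer or
                                    forcing a result, in which case 0 is
                                    returned.  */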
8188 /* Create builtin_expect or builtin_expect_with_probability
8189 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8190 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8191 argument; builtin_expect_with_probability instead uses its third argument
8192 as the PROBABILITY value. */
8194 static tree
8195 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8196 tree predictor, tree probability)
8198 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8200 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8201 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8202 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8203 ret_type = TREE_TYPE (TREE_TYPE (fn));
8204 pred_type = TREE_VALUE (arg_types);
8205 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8207 pred = fold_convert_loc (loc, pred_type, pred);
8208 expected = fold_convert_loc (loc, expected_type, expected);
8210 if (probability)
8211 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8212 else
8213 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8214 predictor);
8216 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8217 build_int_cst (ret_type, 0));
8220 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8221 NULL_TREE if no simplification is possible. */
8223 tree
8224 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8225 tree arg3)
8227 tree inner, fndecl, inner_arg0;
8228 enum tree_code code;
8230 /* Distribute the expected value over short-circuiting operators.
8231 See through the cast from truthvalue_type_node to long. */
8232 inner_arg0 = arg0;
8233 while (CONVERT_EXPR_P (inner_arg0)
8234 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8235 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8236 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8238 /* If this is a builtin_expect within a builtin_expect keep the
8239 inner one. See through a comparison against a constant. It
8240 might have been added to create a truthvalue.
8241 inner = inner_arg0;
8243 if (COMPARISON_CLASS_P (inner)
8244 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8245 inner = TREE_OPERAND (inner, 0);
8247 if (TREE_CODE (inner) == CALL_EXPR
8248 && (fndecl = get_callee_fndecl (inner))
8249 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8250 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8251 return arg0;
8253 inner = inner_arg0;
8254 code = TREE_CODE (inner);
8255 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8257 tree op0 = TREE_OPERAND (inner, 0);
8258 tree op1 = TREE_OPERAND (inner, 1);
8259 arg1 = save_expr (arg1);
8261 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8262 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8263 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8265 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
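/* Source-level sketch of the distribution above (illustrative):

     __builtin_expect (a && b, 1)

   becomes, after saving the expected value,

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   where each comparison against zero is the NE_EXPR truthvalue produced by
   build_builtin_expect_predicate.  */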
8268 /* If the argument isn't invariant then there's nothing else we can do. */
8269 if (!TREE_CONSTANT (inner_arg0))
8270 return NULL_TREE;
8272 /* If we expect that a comparison against the argument will fold to
8273 a constant, return the constant. In practice, this means a true
8274 constant or the address of a non-weak symbol. */
8275 inner = inner_arg0;
8276 STRIP_NOPS (inner);
8277 if (TREE_CODE (inner) == ADDR_EXPR)
8281 inner = TREE_OPERAND (inner, 0);
8283 while (TREE_CODE (inner) == COMPONENT_REF
8284 || TREE_CODE (inner) == ARRAY_REF);
8285 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8286 return NULL_TREE;
8289 /* Otherwise, ARG0 already has the proper type for the return value. */
8290 return arg0;
8293 /* Fold a call to __builtin_classify_type with argument ARG. */
8295 static tree
8296 fold_builtin_classify_type (tree arg)
8298 if (arg == 0)
8299 return build_int_cst (integer_type_node, no_type_class);
8301 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8304 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8305 ARG. */
8307 static tree
8308 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8310 if (!validate_arg (arg, POINTER_TYPE))
8311 return NULL_TREE;
8312 else
8314 c_strlen_data lendata = { };
8315 tree len = c_strlen (arg, 0, &lendata);
8317 if (len)
8318 return fold_convert_loc (loc, type, len);
8320 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8321 also early enough to detect invalid reads in multidimensional
8322 arrays and struct members. */
8323 if (!lendata.decl)
8324 c_strlen (arg, 1, &lendata);
8326 if (lendata.decl)
8328 if (EXPR_HAS_LOCATION (arg))
8329 loc = EXPR_LOCATION (arg);
8330 else if (loc == UNKNOWN_LOCATION)
8331 loc = input_location;
8332 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8335 return NULL_TREE;
8339 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8341 static tree
8342 fold_builtin_inf (location_t loc, tree type, int warn)
8344 REAL_VALUE_TYPE real;
8346 /* __builtin_inff is intended to be usable to define INFINITY on all
8347 targets. If an infinity is not available, INFINITY expands "to a
8348 positive constant of type float that overflows at translation
8349 time", footnote "In this case, using INFINITY will violate the
8350 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8351 Thus we pedwarn to ensure this constraint violation is
8352 diagnosed. */
8353 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8354 pedwarn (loc, 0, "target format does not support infinity");
8356 real_inf (&real);
8357 return build_real (type, real);
8360 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8361 NULL_TREE if no simplification can be made. */
8363 static tree
8364 fold_builtin_sincos (location_t loc,
8365 tree arg0, tree arg1, tree arg2)
8367 tree type;
8368 tree fndecl, call = NULL_TREE;
8370 if (!validate_arg (arg0, REAL_TYPE)
8371 || !validate_arg (arg1, POINTER_TYPE)
8372 || !validate_arg (arg2, POINTER_TYPE))
8373 return NULL_TREE;
8375 type = TREE_TYPE (arg0);
8377 /* Calculate the result when the argument is a constant. */
8378 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8379 if (fn == END_BUILTINS)
8380 return NULL_TREE;
8382 /* Canonicalize sincos to cexpi. */
8383 if (TREE_CODE (arg0) == REAL_CST)
8385 tree complex_type = build_complex_type (type);
8386 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8388 if (!call)
8390 if (!targetm.libc_has_function (function_c99_math_complex, type)
8391 || !builtin_decl_implicit_p (fn))
8392 return NULL_TREE;
8393 fndecl = builtin_decl_explicit (fn);
8394 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8395 call = builtin_save_expr (call);
8398 tree ptype = build_pointer_type (type);
8399 arg1 = fold_convert (ptype, arg1);
8400 arg2 = fold_convert (ptype, arg2);
8401 return build2 (COMPOUND_EXPR, void_type_node,
8402 build2 (MODIFY_EXPR, void_type_node,
8403 build_fold_indirect_ref_loc (loc, arg1),
8404 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8405 build2 (MODIFY_EXPR, void_type_node,
8406 build_fold_indirect_ref_loc (loc, arg2),
8407 fold_build1_loc (loc, REALPART_EXPR, type, call)));
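/* Source-level sketch of the canonicalization above (illustrative):

     sincos (x, &s, &c);

   becomes roughly

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   with the cexpi call replaced by a constant complex value when X is a
   REAL_CST that fold_const_call can evaluate.  */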
8410 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8411 Return NULL_TREE if no simplification can be made. */
8413 static tree
8414 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8416 if (!validate_arg (arg1, POINTER_TYPE)
8417 || !validate_arg (arg2, POINTER_TYPE)
8418 || !validate_arg (len, INTEGER_TYPE))
8419 return NULL_TREE;
8421 /* If the LEN parameter is zero, return zero. */
8422 if (integer_zerop (len))
8423 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8424 arg1, arg2);
8426 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8427 if (operand_equal_p (arg1, arg2, 0))
8428 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8430 /* If the LEN parameter is one, return an expression corresponding to
8431 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8432 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8434 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8435 tree cst_uchar_ptr_node
8436 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8438 tree ind1
8439 = fold_convert_loc (loc, integer_type_node,
8440 build1 (INDIRECT_REF, cst_uchar_node,
8441 fold_convert_loc (loc,
8442 cst_uchar_ptr_node,
8443 arg1)));
8444 tree ind2
8445 = fold_convert_loc (loc, integer_type_node,
8446 build1 (INDIRECT_REF, cst_uchar_node,
8447 fold_convert_loc (loc,
8448 cst_uchar_ptr_node,
8449 arg2)));
8450 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8453 return NULL_TREE;
8456 /* Fold a call to builtin isascii with argument ARG. */
8458 static tree
8459 fold_builtin_isascii (location_t loc, tree arg)
8461 if (!validate_arg (arg, INTEGER_TYPE))
8462 return NULL_TREE;
8463 else
8465 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8466 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8467 build_int_cst (integer_type_node,
8468 ~ (unsigned HOST_WIDE_INT) 0x7f));
8469 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8470 arg, integer_zero_node);
8474 /* Fold a call to builtin toascii with argument ARG. */
8476 static tree
8477 fold_builtin_toascii (location_t loc, tree arg)
8479 if (!validate_arg (arg, INTEGER_TYPE))
8480 return NULL_TREE;
8482 /* Transform toascii(c) -> (c & 0x7f). */
8483 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8484 build_int_cst (integer_type_node, 0x7f));
8487 /* Fold a call to builtin isdigit with argument ARG. */
8489 static tree
8490 fold_builtin_isdigit (location_t loc, tree arg)
8492 if (!validate_arg (arg, INTEGER_TYPE))
8493 return NULL_TREE;
8494 else
8496 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8497 /* According to the C standard, isdigit is unaffected by locale.
8498 However, it definitely is affected by the target character set. */
8499 unsigned HOST_WIDE_INT target_digit0
8500 = lang_hooks.to_target_charset ('0');
8502 if (target_digit0 == 0)
8503 return NULL_TREE;
8505 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8506 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8507 build_int_cst (unsigned_type_node, target_digit0));
8508 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8509 build_int_cst (unsigned_type_node, 9));
8513 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8515 static tree
8516 fold_builtin_fabs (location_t loc, tree arg, tree type)
8518 if (!validate_arg (arg, REAL_TYPE))
8519 return NULL_TREE;
8521 arg = fold_convert_loc (loc, type, arg);
8522 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8525 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8527 static tree
8528 fold_builtin_abs (location_t loc, tree arg, tree type)
8530 if (!validate_arg (arg, INTEGER_TYPE))
8531 return NULL_TREE;
8533 arg = fold_convert_loc (loc, type, arg);
8534 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8537 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8539 static tree
8540 fold_builtin_carg (location_t loc, tree arg, tree type)
8542 if (validate_arg (arg, COMPLEX_TYPE)
8543 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8545 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8547 if (atan2_fn)
8549 tree new_arg = builtin_save_expr (arg);
8550 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8551 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8552 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8556 return NULL_TREE;
8559 /* Fold a call to builtin frexp; we can assume the base is 2. */
8561 static tree
8562 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8564 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8565 return NULL_TREE;
8567 STRIP_NOPS (arg0);
8569 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8570 return NULL_TREE;
8572 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8574 /* Proceed if a valid pointer type was passed in. */
8575 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8577 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8578 tree frac, exp;
8580 switch (value->cl)
8582 case rvc_zero:
8583 /* For +-0, return (*exp = 0, +-0). */
8584 exp = integer_zero_node;
8585 frac = arg0;
8586 break;
8587 case rvc_nan:
8588 case rvc_inf:
8589 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8590 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8591 case rvc_normal:
8593 /* Since the frexp function always expects base 2, and in
8594 GCC normalized significands are already in the range
8595 [0.5, 1.0), we have exactly what frexp wants. */
8596 REAL_VALUE_TYPE frac_rvt = *value;
8597 SET_REAL_EXP (&frac_rvt, 0);
8598 frac = build_real (rettype, frac_rvt);
8599 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8601 break;
8602 default:
8603 gcc_unreachable ();
8606 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8607 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8608 TREE_SIDE_EFFECTS (arg1) = 1;
8609 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8612 return NULL_TREE;
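/* Worked example for the constant folding above: since 6.0 == 0.75 * 2**3,
   a call such as frexp (6.0, &e) folds to the compound expression
   (*e = 3, 0.75), i.e. the result is 0.75 and E (an assumed int variable)
   is set to 3.  */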
8615 /* Fold a call to builtin modf. */
8617 static tree
8618 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8620 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8621 return NULL_TREE;
8623 STRIP_NOPS (arg0);
8625 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8626 return NULL_TREE;
8628 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8630 /* Proceed if a valid pointer type was passed in. */
8631 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8633 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8634 REAL_VALUE_TYPE trunc, frac;
8636 switch (value->cl)
8638 case rvc_nan:
8639 case rvc_zero:
8640 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8641 trunc = frac = *value;
8642 break;
8643 case rvc_inf:
8644 /* For +-Inf, return (*arg1 = arg0, +-0). */
8645 frac = dconst0;
8646 frac.sign = value->sign;
8647 trunc = *value;
8648 break;
8649 case rvc_normal:
8650 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8651 real_trunc (&trunc, VOIDmode, value);
8652 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8653 /* If the original number was negative and already
8654 integral, then the fractional part is -0.0. */
8655 if (value->sign && frac.cl == rvc_zero)
8656 frac.sign = value->sign;
8657 break;
8660 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8661 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8662 build_real (rettype, trunc));
8663 TREE_SIDE_EFFECTS (arg1) = 1;
8664 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8665 build_real (rettype, frac));
8668 return NULL_TREE;
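/* Worked example for the folding above (IP is an assumed double variable):
   modf (2.5, &ip) folds to (*ip = 2.0, 0.5), while modf (-2.0, &ip) folds to
   (*ip = -2.0, -0.0), showing the negative-and-already-integral case handled
   by the sign adjustment above.  */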
8671 /* Given a location LOC, an interclass builtin function decl FNDECL
8672 and its single argument ARG, return a folded expression computing
8673 the same, or NULL_TREE if we either couldn't or didn't want to fold
8674 (the latter happens if there's an RTL instruction available). */
8676 static tree
8677 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8679 machine_mode mode;
8681 if (!validate_arg (arg, REAL_TYPE))
8682 return NULL_TREE;
8684 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8685 return NULL_TREE;
8687 mode = TYPE_MODE (TREE_TYPE (arg));
8689 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8691 /* If there is no optab, try generic code. */
8692 switch (DECL_FUNCTION_CODE (fndecl))
8694 tree result;
8696 CASE_FLT_FN (BUILT_IN_ISINF):
8698 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8699 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8700 tree type = TREE_TYPE (arg);
8701 REAL_VALUE_TYPE r;
8702 char buf[128];
8704 if (is_ibm_extended)
8706 /* NaN and Inf are encoded in the high-order double value
8707 only. The low-order value is not significant. */
8708 type = double_type_node;
8709 mode = DFmode;
8710 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8712 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8713 real_from_string (&r, buf);
8714 result = build_call_expr (isgr_fn, 2,
8715 fold_build1_loc (loc, ABS_EXPR, type, arg),
8716 build_real (type, r));
8717 return result;
8719 CASE_FLT_FN (BUILT_IN_FINITE):
8720 case BUILT_IN_ISFINITE:
8722 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8723 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8724 tree type = TREE_TYPE (arg);
8725 REAL_VALUE_TYPE r;
8726 char buf[128];
8728 if (is_ibm_extended)
8730 /* NaN and Inf are encoded in the high-order double value
8731 only. The low-order value is not significant. */
8732 type = double_type_node;
8733 mode = DFmode;
8734 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8736 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8737 real_from_string (&r, buf);
8738 result = build_call_expr (isle_fn, 2,
8739 fold_build1_loc (loc, ABS_EXPR, type, arg),
8740 build_real (type, r));
8741 /*result = fold_build2_loc (loc, UNGT_EXPR,
8742 TREE_TYPE (TREE_TYPE (fndecl)),
8743 fold_build1_loc (loc, ABS_EXPR, type, arg),
8744 build_real (type, r));
8745 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8746 TREE_TYPE (TREE_TYPE (fndecl)),
8747 result);*/
8748 return result;
8750 case BUILT_IN_ISNORMAL:
8752 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8753 islessequal(fabs(x),DBL_MAX). */
8754 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8755 tree type = TREE_TYPE (arg);
8756 tree orig_arg, max_exp, min_exp;
8757 machine_mode orig_mode = mode;
8758 REAL_VALUE_TYPE rmax, rmin;
8759 char buf[128];
8761 orig_arg = arg = builtin_save_expr (arg);
8762 if (is_ibm_extended)
8764 /* Use double to test the normal range of IBM extended
8765 precision. Emin for IBM extended precision is
8766 different to emin for IEEE double, being 53 higher
8767 since the low double exponent is at least 53 lower
8768 than the high double exponent. */
8769 type = double_type_node;
8770 mode = DFmode;
8771 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8773 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8775 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8776 real_from_string (&rmax, buf);
8777 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8778 real_from_string (&rmin, buf);
8779 max_exp = build_real (type, rmax);
8780 min_exp = build_real (type, rmin);
8782 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8783 if (is_ibm_extended)
8785 /* Testing the high end of the range is done just using
8786 the high double, using the same test as isfinite().
8787 For the subnormal end of the range we first test the
8788 high double, then if its magnitude is equal to the
8789 limit of 0x1p-969, we test whether the low double is
8790 non-zero and opposite sign to the high double. */
8791 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8792 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8793 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8794 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8795 arg, min_exp);
8796 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8797 complex_double_type_node, orig_arg);
8798 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8799 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8800 tree zero = build_real (type, dconst0);
8801 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8802 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8803 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8804 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8805 fold_build3 (COND_EXPR,
8806 integer_type_node,
8807 hilt, logt, lolt));
8808 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8809 eq_min, ok_lo);
8810 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8811 gt_min, eq_min);
8813 else
8815 tree const isge_fn
8816 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8817 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8819 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8820 max_exp, min_exp);
8821 return result;
8823 default:
8824 break;
8827 return NULL_TREE;
8830 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8831 ARG is the argument for the call. */
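/* Illustrative sketch of the effect: when the argument's value is known
   at compile time, calls such as
     __builtin_isnan (__builtin_nan (""))   ->  1
     __builtin_isinf (2.0)                  ->  0
   fold to constants; otherwise a runtime test is constructed below.  */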
8833 static tree
8834 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8838 if (!validate_arg (arg, REAL_TYPE))
8839 return NULL_TREE;
8841 switch (builtin_index)
8843 case BUILT_IN_ISINF:
8844 if (tree_expr_infinite_p (arg))
8845 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8846 if (!tree_expr_maybe_infinite_p (arg))
8847 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8848 return NULL_TREE;
8850 case BUILT_IN_ISINF_SIGN:
8852 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8853 /* In a boolean context, GCC will fold the inner COND_EXPR to
8854 1. So e.g. "if (isinf_sign(x))" would be folded to just
8855 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8856 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8857 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8858 tree tmp = NULL_TREE;
8860 arg = builtin_save_expr (arg);
8862 if (signbit_fn && isinf_fn)
8864 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8865 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8867 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8868 signbit_call, integer_zero_node);
8869 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8870 isinf_call, integer_zero_node);
8872 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8873 integer_minus_one_node, integer_one_node);
8874 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8875 isinf_call, tmp,
8876 integer_zero_node);
8879 return tmp;
8882 case BUILT_IN_ISFINITE:
8883 if (tree_expr_finite_p (arg))
8884 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8885 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8886 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8887 return NULL_TREE;
8889 case BUILT_IN_ISNAN:
8890 if (tree_expr_nan_p (arg))
8891 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8892 if (!tree_expr_maybe_nan_p (arg))
8893 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8896 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8897 if (is_ibm_extended)
8899 /* NaN and Inf are encoded in the high-order double value
8900 only. The low-order value is not significant. */
8901 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8904 arg = builtin_save_expr (arg);
8905 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8907 default:
8908 gcc_unreachable ();
8912 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8913 This builtin will generate code to return the appropriate floating
8914 point classification depending on the value of the floating point
8915 number passed in. The possible return values must be supplied as
8916 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8917 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8918 one floating point argument which is "type generic". */
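/* Illustrative sketch: a <math.h> implementation typically expands the
   fpclassify macro to something like
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, (x))
   so a constant call with x == 0.0 folds below to the FP_ZERO argument.  */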
8920 static tree
8921 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8923 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8924 arg, type, res, tmp;
8925 machine_mode mode;
8926 REAL_VALUE_TYPE r;
8927 char buf[128];
8929 /* Verify the required arguments in the original call. */
8930 if (nargs != 6
8931 || !validate_arg (args[0], INTEGER_TYPE)
8932 || !validate_arg (args[1], INTEGER_TYPE)
8933 || !validate_arg (args[2], INTEGER_TYPE)
8934 || !validate_arg (args[3], INTEGER_TYPE)
8935 || !validate_arg (args[4], INTEGER_TYPE)
8936 || !validate_arg (args[5], REAL_TYPE))
8937 return NULL_TREE;
8939 fp_nan = args[0];
8940 fp_infinite = args[1];
8941 fp_normal = args[2];
8942 fp_subnormal = args[3];
8943 fp_zero = args[4];
8944 arg = args[5];
8945 type = TREE_TYPE (arg);
8946 mode = TYPE_MODE (type);
8947 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8949 /* fpclassify(x) ->
8950 isnan(x) ? FP_NAN :
8951 (fabs(x) == Inf ? FP_INFINITE :
8952 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8953 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8955 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8956 build_real (type, dconst0));
8957 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8958 tmp, fp_zero, fp_subnormal);
8960 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8961 real_from_string (&r, buf);
8962 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8963 arg, build_real (type, r));
8964 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8966 if (tree_expr_maybe_infinite_p (arg))
8968 real_inf (&r);
8969 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8970 build_real (type, r));
8971 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8972 fp_infinite, res);
8975 if (tree_expr_maybe_nan_p (arg))
8977 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8978 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8981 return res;
8984 /* Fold a call to an unordered comparison function such as
8985 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8986 being called and ARG0 and ARG1 are the arguments for the call.
8987 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8988 the opposite of the desired result. UNORDERED_CODE is used
8989 for modes that can hold NaNs and ORDERED_CODE is used for
8990 the rest. */
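/* E.g. __builtin_isgreater (x, y) is folded to !(x <= y) when neither
   argument can be a NaN, and to the negation of an UNLE_EXPR comparison
   when a NaN is possible, so no "invalid" exception is raised.  */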
8992 static tree
8993 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8994 enum tree_code unordered_code,
8995 enum tree_code ordered_code)
8997 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8998 enum tree_code code;
8999 tree type0, type1;
9000 enum tree_code code0, code1;
9001 tree cmp_type = NULL_TREE;
9003 type0 = TREE_TYPE (arg0);
9004 type1 = TREE_TYPE (arg1);
9006 code0 = TREE_CODE (type0);
9007 code1 = TREE_CODE (type1);
9009 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9010 /* Choose the wider of two real types. */
9011 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9012 ? type0 : type1;
9013 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9014 cmp_type = type0;
9015 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9016 cmp_type = type1;
9018 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9019 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9021 if (unordered_code == UNORDERED_EXPR)
9023 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9024 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9025 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9026 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9027 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9030 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9031 ? unordered_code : ordered_code;
9032 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9033 fold_build2_loc (loc, code, type, arg0, arg1));
9036 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9037 arithmetic if it can never overflow, or into internal functions that
9038 return both the result of the arithmetic and an overflow boolean flag in
9039 a complex integer result, or some other check for overflow.
9040 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9041 checking part of that. */
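/* Illustrative sketch of the two folds: for variable operands,
     __builtin_add_overflow (a, b, &r)
   becomes a call to the internal function IFN_ADD_OVERFLOW whose complex
   result supplies both the stored sum (REALPART) and the boolean overflow
   flag (IMAGPART), while
     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)
   folds directly to true because both source operands are integer
   constants.  */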
9043 static tree
9044 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9045 tree arg0, tree arg1, tree arg2)
9047 enum internal_fn ifn = IFN_LAST;
9048 /* The code of the expression corresponding to the built-in. */
9049 enum tree_code opcode = ERROR_MARK;
9050 bool ovf_only = false;
9052 switch (fcode)
9054 case BUILT_IN_ADD_OVERFLOW_P:
9055 ovf_only = true;
9056 /* FALLTHRU */
9057 case BUILT_IN_ADD_OVERFLOW:
9058 case BUILT_IN_SADD_OVERFLOW:
9059 case BUILT_IN_SADDL_OVERFLOW:
9060 case BUILT_IN_SADDLL_OVERFLOW:
9061 case BUILT_IN_UADD_OVERFLOW:
9062 case BUILT_IN_UADDL_OVERFLOW:
9063 case BUILT_IN_UADDLL_OVERFLOW:
9064 opcode = PLUS_EXPR;
9065 ifn = IFN_ADD_OVERFLOW;
9066 break;
9067 case BUILT_IN_SUB_OVERFLOW_P:
9068 ovf_only = true;
9069 /* FALLTHRU */
9070 case BUILT_IN_SUB_OVERFLOW:
9071 case BUILT_IN_SSUB_OVERFLOW:
9072 case BUILT_IN_SSUBL_OVERFLOW:
9073 case BUILT_IN_SSUBLL_OVERFLOW:
9074 case BUILT_IN_USUB_OVERFLOW:
9075 case BUILT_IN_USUBL_OVERFLOW:
9076 case BUILT_IN_USUBLL_OVERFLOW:
9077 opcode = MINUS_EXPR;
9078 ifn = IFN_SUB_OVERFLOW;
9079 break;
9080 case BUILT_IN_MUL_OVERFLOW_P:
9081 ovf_only = true;
9082 /* FALLTHRU */
9083 case BUILT_IN_MUL_OVERFLOW:
9084 case BUILT_IN_SMUL_OVERFLOW:
9085 case BUILT_IN_SMULL_OVERFLOW:
9086 case BUILT_IN_SMULLL_OVERFLOW:
9087 case BUILT_IN_UMUL_OVERFLOW:
9088 case BUILT_IN_UMULL_OVERFLOW:
9089 case BUILT_IN_UMULLL_OVERFLOW:
9090 opcode = MULT_EXPR;
9091 ifn = IFN_MUL_OVERFLOW;
9092 break;
9093 default:
9094 gcc_unreachable ();
9097 /* For the "generic" overloads, the first two arguments can have different
9098 types and the last argument determines the target type to use to check
9099 for overflow. The arguments of the other overloads all have the same
9100 type. */
9101 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9103 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9104 arguments are constant, attempt to fold the built-in call into a constant
9105 expression indicating whether or not it detected an overflow. */
9106 if (ovf_only
9107 && TREE_CODE (arg0) == INTEGER_CST
9108 && TREE_CODE (arg1) == INTEGER_CST)
9109 /* Perform the computation in the target type and check for overflow. */
9110 return omit_one_operand_loc (loc, boolean_type_node,
9111 arith_overflowed_p (opcode, type, arg0, arg1)
9112 ? boolean_true_node : boolean_false_node,
9113 arg2);
9115 tree intres, ovfres;
9116 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9118 intres = fold_binary_loc (loc, opcode, type,
9119 fold_convert_loc (loc, type, arg0),
9120 fold_convert_loc (loc, type, arg1));
9121 if (TREE_OVERFLOW (intres))
9122 intres = drop_tree_overflow (intres);
9123 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9124 ? boolean_true_node : boolean_false_node);
9126 else
9128 tree ctype = build_complex_type (type);
9129 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9130 arg0, arg1);
9131 tree tgt = save_expr (call);
9132 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9133 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9134 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9137 if (ovf_only)
9138 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9140 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9141 tree store
9142 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9143 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9146 /* Fold a call to __builtin_FILE to a constant string. */
9148 static inline tree
9149 fold_builtin_FILE (location_t loc)
9151 if (const char *fname = LOCATION_FILE (loc))
9153 /* The documentation says this builtin is equivalent to the preprocessor
9154 __FILE__ macro so it appears appropriate to use the same file prefix
9155 mappings. */
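/* E.g. when a file is compiled as /src/gcc/foo.c with
   -ffile-prefix-map=/src/gcc=., __builtin_FILE () folds to roughly
   "./foo.c", matching what the __FILE__ macro would produce.  */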
9156 fname = remap_macro_filename (fname);
9157 return build_string_literal (strlen (fname) + 1, fname);
9160 return build_string_literal (1, "");
9163 /* Fold a call to __builtin_FUNCTION to a constant string. */
9165 static inline tree
9166 fold_builtin_FUNCTION ()
9168 const char *name = "";
9170 if (current_function_decl)
9171 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9173 return build_string_literal (strlen (name) + 1, name);
9176 /* Fold a call to __builtin_LINE to an integer constant. */
9178 static inline tree
9179 fold_builtin_LINE (location_t loc, tree type)
9181 return build_int_cst (type, LOCATION_LINE (loc));
9184 /* Fold a call to built-in function FNDECL with 0 arguments.
9185 This function returns NULL_TREE if no simplification was possible. */
9187 static tree
9188 fold_builtin_0 (location_t loc, tree fndecl)
9190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9192 switch (fcode)
9194 case BUILT_IN_FILE:
9195 return fold_builtin_FILE (loc);
9197 case BUILT_IN_FUNCTION:
9198 return fold_builtin_FUNCTION ();
9200 case BUILT_IN_LINE:
9201 return fold_builtin_LINE (loc, type);
9203 CASE_FLT_FN (BUILT_IN_INF):
9204 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9205 case BUILT_IN_INFD32:
9206 case BUILT_IN_INFD64:
9207 case BUILT_IN_INFD128:
9208 return fold_builtin_inf (loc, type, true);
9210 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9211 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9212 return fold_builtin_inf (loc, type, false);
9214 case BUILT_IN_CLASSIFY_TYPE:
9215 return fold_builtin_classify_type (NULL_TREE);
9217 default:
9218 break;
9220 return NULL_TREE;
9223 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9224 This function returns NULL_TREE if no simplification was possible. */
9226 static tree
9227 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9229 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9230 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9232 if (TREE_CODE (arg0) == ERROR_MARK)
9233 return NULL_TREE;
9235 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9236 return ret;
9238 switch (fcode)
9240 case BUILT_IN_CONSTANT_P:
9242 tree val = fold_builtin_constant_p (arg0);
9244 /* Gimplification will pull the CALL_EXPR for the builtin out of
9245 an if condition. When not optimizing, we'll not CSE it back.
9246 To avoid link error types of regressions, return false now. */
9247 if (!val && !optimize)
9248 val = integer_zero_node;
9250 return val;
9253 case BUILT_IN_CLASSIFY_TYPE:
9254 return fold_builtin_classify_type (arg0);
9256 case BUILT_IN_STRLEN:
9257 return fold_builtin_strlen (loc, expr, type, arg0);
9259 CASE_FLT_FN (BUILT_IN_FABS):
9260 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9261 case BUILT_IN_FABSD32:
9262 case BUILT_IN_FABSD64:
9263 case BUILT_IN_FABSD128:
9264 return fold_builtin_fabs (loc, arg0, type);
9266 case BUILT_IN_ABS:
9267 case BUILT_IN_LABS:
9268 case BUILT_IN_LLABS:
9269 case BUILT_IN_IMAXABS:
9270 return fold_builtin_abs (loc, arg0, type);
9272 CASE_FLT_FN (BUILT_IN_CONJ):
9273 if (validate_arg (arg0, COMPLEX_TYPE)
9274 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9275 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9276 break;
9278 CASE_FLT_FN (BUILT_IN_CREAL):
9279 if (validate_arg (arg0, COMPLEX_TYPE)
9280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9281 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9282 break;
9284 CASE_FLT_FN (BUILT_IN_CIMAG):
9285 if (validate_arg (arg0, COMPLEX_TYPE)
9286 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9287 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9288 break;
9290 CASE_FLT_FN (BUILT_IN_CARG):
9291 return fold_builtin_carg (loc, arg0, type);
9293 case BUILT_IN_ISASCII:
9294 return fold_builtin_isascii (loc, arg0);
9296 case BUILT_IN_TOASCII:
9297 return fold_builtin_toascii (loc, arg0);
9299 case BUILT_IN_ISDIGIT:
9300 return fold_builtin_isdigit (loc, arg0);
9302 CASE_FLT_FN (BUILT_IN_FINITE):
9303 case BUILT_IN_FINITED32:
9304 case BUILT_IN_FINITED64:
9305 case BUILT_IN_FINITED128:
9306 case BUILT_IN_ISFINITE:
9308 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9309 if (ret)
9310 return ret;
9311 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9314 CASE_FLT_FN (BUILT_IN_ISINF):
9315 case BUILT_IN_ISINFD32:
9316 case BUILT_IN_ISINFD64:
9317 case BUILT_IN_ISINFD128:
9319 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9320 if (ret)
9321 return ret;
9322 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9325 case BUILT_IN_ISNORMAL:
9326 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9328 case BUILT_IN_ISINF_SIGN:
9329 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9331 CASE_FLT_FN (BUILT_IN_ISNAN):
9332 case BUILT_IN_ISNAND32:
9333 case BUILT_IN_ISNAND64:
9334 case BUILT_IN_ISNAND128:
9335 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9337 case BUILT_IN_FREE:
9338 if (integer_zerop (arg0))
9339 return build_empty_stmt (loc);
9340 break;
9342 default:
9343 break;
9346 return NULL_TREE;
9350 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9351 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9352 if no simplification was possible. */
9354 static tree
9355 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9357 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9360 if (TREE_CODE (arg0) == ERROR_MARK
9361 || TREE_CODE (arg1) == ERROR_MARK)
9362 return NULL_TREE;
9364 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9365 return ret;
9367 switch (fcode)
9369 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9370 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9371 if (validate_arg (arg0, REAL_TYPE)
9372 && validate_arg (arg1, POINTER_TYPE))
9373 return do_mpfr_lgamma_r (arg0, arg1, type);
9374 break;
9376 CASE_FLT_FN (BUILT_IN_FREXP):
9377 return fold_builtin_frexp (loc, arg0, arg1, type);
9379 CASE_FLT_FN (BUILT_IN_MODF):
9380 return fold_builtin_modf (loc, arg0, arg1, type);
9382 case BUILT_IN_STRSPN:
9383 return fold_builtin_strspn (loc, expr, arg0, arg1);
9385 case BUILT_IN_STRCSPN:
9386 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9388 case BUILT_IN_STRPBRK:
9389 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9391 case BUILT_IN_EXPECT:
9392 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9394 case BUILT_IN_ISGREATER:
9395 return fold_builtin_unordered_cmp (loc, fndecl,
9396 arg0, arg1, UNLE_EXPR, LE_EXPR);
9397 case BUILT_IN_ISGREATEREQUAL:
9398 return fold_builtin_unordered_cmp (loc, fndecl,
9399 arg0, arg1, UNLT_EXPR, LT_EXPR);
9400 case BUILT_IN_ISLESS:
9401 return fold_builtin_unordered_cmp (loc, fndecl,
9402 arg0, arg1, UNGE_EXPR, GE_EXPR);
9403 case BUILT_IN_ISLESSEQUAL:
9404 return fold_builtin_unordered_cmp (loc, fndecl,
9405 arg0, arg1, UNGT_EXPR, GT_EXPR);
9406 case BUILT_IN_ISLESSGREATER:
9407 return fold_builtin_unordered_cmp (loc, fndecl,
9408 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9409 case BUILT_IN_ISUNORDERED:
9410 return fold_builtin_unordered_cmp (loc, fndecl,
9411 arg0, arg1, UNORDERED_EXPR,
9412 NOP_EXPR);
9414 /* We do the folding for va_start in the expander. */
9415 case BUILT_IN_VA_START:
9416 break;
9418 case BUILT_IN_OBJECT_SIZE:
9419 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
9420 return fold_builtin_object_size (arg0, arg1, fcode);
9422 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9423 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9425 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9426 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9428 default:
9429 break;
9431 return NULL_TREE;
9434 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9435 and ARG2.
9436 This function returns NULL_TREE if no simplification was possible. */
9438 static tree
9439 fold_builtin_3 (location_t loc, tree fndecl,
9440 tree arg0, tree arg1, tree arg2)
9442 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9443 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9445 if (TREE_CODE (arg0) == ERROR_MARK
9446 || TREE_CODE (arg1) == ERROR_MARK
9447 || TREE_CODE (arg2) == ERROR_MARK)
9448 return NULL_TREE;
9450 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9451 arg0, arg1, arg2))
9452 return ret;
9454 switch (fcode)
9457 CASE_FLT_FN (BUILT_IN_SINCOS):
9458 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9460 CASE_FLT_FN (BUILT_IN_REMQUO):
9461 if (validate_arg (arg0, REAL_TYPE)
9462 && validate_arg (arg1, REAL_TYPE)
9463 && validate_arg (arg2, POINTER_TYPE))
9464 return do_mpfr_remquo (arg0, arg1, arg2);
9465 break;
9467 case BUILT_IN_MEMCMP:
9468 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9470 case BUILT_IN_EXPECT:
9471 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9473 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9474 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9476 case BUILT_IN_ADD_OVERFLOW:
9477 case BUILT_IN_SUB_OVERFLOW:
9478 case BUILT_IN_MUL_OVERFLOW:
9479 case BUILT_IN_ADD_OVERFLOW_P:
9480 case BUILT_IN_SUB_OVERFLOW_P:
9481 case BUILT_IN_MUL_OVERFLOW_P:
9482 case BUILT_IN_SADD_OVERFLOW:
9483 case BUILT_IN_SADDL_OVERFLOW:
9484 case BUILT_IN_SADDLL_OVERFLOW:
9485 case BUILT_IN_SSUB_OVERFLOW:
9486 case BUILT_IN_SSUBL_OVERFLOW:
9487 case BUILT_IN_SSUBLL_OVERFLOW:
9488 case BUILT_IN_SMUL_OVERFLOW:
9489 case BUILT_IN_SMULL_OVERFLOW:
9490 case BUILT_IN_SMULLL_OVERFLOW:
9491 case BUILT_IN_UADD_OVERFLOW:
9492 case BUILT_IN_UADDL_OVERFLOW:
9493 case BUILT_IN_UADDLL_OVERFLOW:
9494 case BUILT_IN_USUB_OVERFLOW:
9495 case BUILT_IN_USUBL_OVERFLOW:
9496 case BUILT_IN_USUBLL_OVERFLOW:
9497 case BUILT_IN_UMUL_OVERFLOW:
9498 case BUILT_IN_UMULL_OVERFLOW:
9499 case BUILT_IN_UMULLL_OVERFLOW:
9500 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9502 default:
9503 break;
9505 return NULL_TREE;
9508 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9509 ARGS is an array of NARGS arguments. IGNORE is true if the result
9510 of the function call is ignored. This function returns NULL_TREE
9511 if no simplification was possible. */
9513 static tree
9514 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9515 int nargs, bool)
9517 tree ret = NULL_TREE;
9519 switch (nargs)
9521 case 0:
9522 ret = fold_builtin_0 (loc, fndecl);
9523 break;
9524 case 1:
9525 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9526 break;
9527 case 2:
9528 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9529 break;
9530 case 3:
9531 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9532 break;
9533 default:
9534 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9535 break;
9537 if (ret)
9539 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9540 SET_EXPR_LOCATION (ret, loc);
9541 return ret;
9543 return NULL_TREE;
9546 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9547 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9548 of arguments in ARGS to be omitted. OLDNARGS is the number of
9549 elements in ARGS. */
9551 static tree
9552 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9553 int skip, tree fndecl, int n, va_list newargs)
9555 int nargs = oldnargs - skip + n;
9556 tree *buffer;
9558 if (n > 0)
9560 int i, j;
9562 buffer = XALLOCAVEC (tree, nargs);
9563 for (i = 0; i < n; i++)
9564 buffer[i] = va_arg (newargs, tree);
9565 for (j = skip; j < oldnargs; j++, i++)
9566 buffer[i] = args[j];
9568 else
9569 buffer = args + skip;
9571 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9574 /* Return true if FNDECL shouldn't be folded right now.
9575 If a built-in function has an always_inline wrapper, defer folding
9576 it until after always_inline functions have been inlined, otherwise
9577 e.g. -D_FORTIFY_SOURCE checking
9578 might not be performed. */
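/* A hypothetical sketch of such a wrapper, as glibc-style fortified
   headers may provide:
     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *__d, const void *__s, size_t __n)
     {
       return __builtin___memcpy_chk (__d, __s, __n,
                                      __builtin_object_size (__d, 0));
     }
   Folding the builtin before this wrapper is inlined would bypass the
   _FORTIFY_SOURCE size check.  */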
9580 bool
9581 avoid_folding_inline_builtin (tree fndecl)
9583 return (DECL_DECLARED_INLINE_P (fndecl)
9584 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9585 && cfun
9586 && !cfun->always_inline_functions_inlined
9587 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9590 /* A wrapper function for builtin folding that prevents warnings for
9591 "statement without effect" and the like, caused by removing the
9592 call node earlier than the warning is generated. */
9594 tree
9595 fold_call_expr (location_t loc, tree exp, bool ignore)
9597 tree ret = NULL_TREE;
9598 tree fndecl = get_callee_fndecl (exp);
9599 if (fndecl && fndecl_built_in_p (fndecl)
9600 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9601 yet. Defer folding until we see all the arguments
9602 (after inlining). */
9603 && !CALL_EXPR_VA_ARG_PACK (exp))
9605 int nargs = call_expr_nargs (exp);
9607 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9608 instead last argument is __builtin_va_arg_pack (). Defer folding
9609 even in that case, until arguments are finalized. */
9610 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9612 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9613 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9614 return NULL_TREE;
9617 if (avoid_folding_inline_builtin (fndecl))
9618 return NULL_TREE;
9620 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9621 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9622 CALL_EXPR_ARGP (exp), ignore);
9623 else
9625 tree *args = CALL_EXPR_ARGP (exp);
9626 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9627 if (ret)
9628 return ret;
9631 return NULL_TREE;
9634 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9635 N arguments are passed in the array ARGARRAY. Return a folded
9636 expression or NULL_TREE if no simplification was possible. */
9638 tree
9639 fold_builtin_call_array (location_t loc, tree,
9640 tree fn,
9641 int n,
9642 tree *argarray)
9644 if (TREE_CODE (fn) != ADDR_EXPR)
9645 return NULL_TREE;
9647 tree fndecl = TREE_OPERAND (fn, 0);
9648 if (TREE_CODE (fndecl) == FUNCTION_DECL
9649 && fndecl_built_in_p (fndecl))
9651 /* If last argument is __builtin_va_arg_pack (), arguments to this
9652 function are not finalized yet. Defer folding until they are. */
9653 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9655 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9656 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9657 return NULL_TREE;
9659 if (avoid_folding_inline_builtin (fndecl))
9660 return NULL_TREE;
9661 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9662 return targetm.fold_builtin (fndecl, n, argarray, false);
9663 else
9664 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9667 return NULL_TREE;
9670 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9671 along with N new arguments specified as the "..." parameters. SKIP
9672 is the number of arguments in EXP to be omitted. This function is used
9673 to do varargs-to-varargs transformations. */
9675 static tree
9676 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9678 va_list ap;
9679 tree t;
9681 va_start (ap, n);
9682 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9683 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9684 va_end (ap);
9686 return t;
9689 /* Validate a single argument ARG against a tree code CODE representing
9690 a type. Return true when argument is valid. */
9692 static bool
9693 validate_arg (const_tree arg, enum tree_code code)
9695 if (!arg)
9696 return false;
9697 else if (code == POINTER_TYPE)
9698 return POINTER_TYPE_P (TREE_TYPE (arg));
9699 else if (code == INTEGER_TYPE)
9700 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9701 return code == TREE_CODE (TREE_TYPE (arg));
9704 /* This function validates the types of a function call argument list
9705 against a specified list of tree_codes. If the last specifier is a 0,
9706 that represents an ellipsis, otherwise the last specifier must be a
9707 VOID_TYPE.
9709 This is the GIMPLE version of validate_arglist. Eventually we want to
9710 completely convert builtins.cc to work from GIMPLEs and the tree based
9711 validate_arglist will then be removed. */
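/* Illustrative use, for a call expected to take a float and a pointer:
     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return false;
   A trailing 0 in place of VOID_TYPE would instead accept any further
   arguments.  */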
9713 bool
9714 validate_gimple_arglist (const gcall *call, ...)
9716 enum tree_code code;
9717 bool res = 0;
9718 va_list ap;
9719 const_tree arg;
9720 size_t i;
9722 va_start (ap, call);
9723 i = 0;
9727 code = (enum tree_code) va_arg (ap, int);
9728 switch (code)
9730 case 0:
9731 /* This signifies an ellipsis; any further arguments are all ok. */
9732 res = true;
9733 goto end;
9734 case VOID_TYPE:
9735 /* This signifies an endlink, if no arguments remain, return
9736 true, otherwise return false. */
9737 res = (i == gimple_call_num_args (call));
9738 goto end;
9739 default:
9740 /* If no parameters remain or the parameter's code does not
9741 match the specified code, return false. Otherwise continue
9742 checking any remaining arguments. */
9743 arg = gimple_call_arg (call, i++);
9744 if (!validate_arg (arg, code))
9745 goto end;
9746 break;
9749 while (1);
9751 /* We need gotos here since we can only have one VA_CLOSE in a
9752 function. */
9753 end: ;
9754 va_end (ap);
9756 return res;
9759 /* Default target-specific builtin expander that does nothing. */
9761 rtx
9762 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9763 rtx target ATTRIBUTE_UNUSED,
9764 rtx subtarget ATTRIBUTE_UNUSED,
9765 machine_mode mode ATTRIBUTE_UNUSED,
9766 int ignore ATTRIBUTE_UNUSED)
9768 return NULL_RTX;
9771 /* Returns true if EXP represents data that would potentially reside
9772 in a readonly section. */
9774 bool
9775 readonly_data_expr (tree exp)
9777 STRIP_NOPS (exp);
9779 if (TREE_CODE (exp) != ADDR_EXPR)
9780 return false;
9782 exp = get_base_address (TREE_OPERAND (exp, 0));
9783 if (!exp)
9784 return false;
9786 /* Make sure we call decl_readonly_section only for trees it
9787 can handle (since it returns true for everything it doesn't
9788 understand). */
9789 if (TREE_CODE (exp) == STRING_CST
9790 || TREE_CODE (exp) == CONSTRUCTOR
9791 || (VAR_P (exp) && TREE_STATIC (exp)))
9792 return decl_readonly_section (exp, 0);
9793 else
9794 return false;
9797 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9798 to the call, and TYPE is its return type.
9800 Return NULL_TREE if no simplification was possible, otherwise return the
9801 simplified form of the call as a tree.
9803 The simplified form may be a constant or other expression which
9804 computes the same value, but in a more efficient manner (including
9805 calls to other builtin functions).
9807 The call may contain arguments which need to be evaluated, but
9808 which are not useful to determine the result of the call. In
9809 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9810 COMPOUND_EXPR will be an argument which must be evaluated.
9811 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9812 COMPOUND_EXPR in the chain will contain the tree for the simplified
9813 form of the builtin function call. */
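/* E.g. strpbrk (s, "") folds to a null pointer (after evaluating S1 for
   side effects), strpbrk (s, "a") becomes strchr (s, 'a'), and when both
   arguments are literals, say strpbrk ("hello", "lo"), the result folds
   to an offset into the first literal ("llo" here).  */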
9815 static tree
9816 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9818 if (!validate_arg (s1, POINTER_TYPE)
9819 || !validate_arg (s2, POINTER_TYPE))
9820 return NULL_TREE;
9822 tree fn;
9823 const char *p1, *p2;
9825 p2 = c_getstr (s2);
9826 if (p2 == NULL)
9827 return NULL_TREE;
9829 p1 = c_getstr (s1);
9830 if (p1 != NULL)
9832 const char *r = strpbrk (p1, p2);
9833 tree tem;
9835 if (r == NULL)
9836 return build_int_cst (TREE_TYPE (s1), 0);
9838 /* Return an offset into the constant string argument. */
9839 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9840 return fold_convert_loc (loc, type, tem);
9843 if (p2[0] == '\0')
9844 /* strpbrk(x, "") == NULL.
9845 Evaluate and ignore s1 in case it had side-effects. */
9846 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9848 if (p2[1] != '\0')
9849 return NULL_TREE; /* Really call strpbrk. */
9851 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9852 if (!fn)
9853 return NULL_TREE;
9855 /* New argument list transforming strpbrk(s1, s2) to
9856 strchr(s1, s2[0]). */
9857 return build_call_expr_loc (loc, fn, 2, s1,
9858 build_int_cst (integer_type_node, p2[0]));
9861 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9862 to the call.
9864 Return NULL_TREE if no simplification was possible, otherwise return the
9865 simplified form of the call as a tree.
9867 The simplified form may be a constant or other expression which
9868 computes the same value, but in a more efficient manner (including
9869 calls to other builtin functions).
9871 The call may contain arguments which need to be evaluated, but
9872 which are not useful to determine the result of the call. In
9873 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9874 COMPOUND_EXPR will be an argument which must be evaluated.
9875 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9876 COMPOUND_EXPR in the chain will contain the tree for the simplified
9877 form of the builtin function call. */
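/* E.g. strspn (s, "") and strspn ("", accept) both fold to (size_t) 0,
   with the other argument still evaluated for its side effects.  */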
9879 static tree
9880 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9882 if (!validate_arg (s1, POINTER_TYPE)
9883 || !validate_arg (s2, POINTER_TYPE))
9884 return NULL_TREE;
9886 if (!check_nul_terminated_array (expr, s1)
9887 || !check_nul_terminated_array (expr, s2))
9888 return NULL_TREE;
9890 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9892 /* If either argument is "", return NULL_TREE. */
9893 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9894 /* Evaluate and ignore both arguments in case either one has
9895 side-effects. */
9896 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9897 s1, s2);
9898 return NULL_TREE;
9901 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9902 to the call.
9904 Return NULL_TREE if no simplification was possible, otherwise return the
9905 simplified form of the call as a tree.
9907 The simplified form may be a constant or other expression which
9908 computes the same value, but in a more efficient manner (including
9909 calls to other builtin functions).
9911 The call may contain arguments which need to be evaluated, but
9912 which are not useful to determine the result of the call. In
9913 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9914 COMPOUND_EXPR will be an argument which must be evaluated.
9915 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9916 COMPOUND_EXPR in the chain will contain the tree for the simplified
9917 form of the builtin function call. */
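/* E.g. strcspn ("", reject) folds to (size_t) 0 and strcspn (s, "")
   folds to strlen (s); other cases are left for the library call.  */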
9919 static tree
9920 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9922 if (!validate_arg (s1, POINTER_TYPE)
9923 || !validate_arg (s2, POINTER_TYPE))
9924 return NULL_TREE;
9926 if (!check_nul_terminated_array (expr, s1)
9927 || !check_nul_terminated_array (expr, s2))
9928 return NULL_TREE;
9930 /* If the first argument is "", return NULL_TREE. */
9931 const char *p1 = c_getstr (s1);
9932 if (p1 && *p1 == '\0')
9934 /* Evaluate and ignore argument s2 in case it has
9935 side-effects. */
9936 return omit_one_operand_loc (loc, size_type_node,
9937 size_zero_node, s2);
9940 /* If the second argument is "", return __builtin_strlen(s1). */
9941 const char *p2 = c_getstr (s2);
9942 if (p2 && *p2 == '\0')
9944 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9946 /* If the replacement _DECL isn't initialized, don't do the
9947 transformation. */
9948 if (!fn)
9949 return NULL_TREE;
9951 return build_call_expr_loc (loc, fn, 1, s1);
9953 return NULL_TREE;
9956 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9957 produced, false otherwise. This is done so that we don't output the error
9958 or warning more than once. */
9960 bool
9961 fold_builtin_next_arg (tree exp, bool va_start_p)
9963 tree fntype = TREE_TYPE (current_function_decl);
9964 int nargs = call_expr_nargs (exp);
9965 tree arg;
9966 /* There is a good chance the current input_location points inside the
9967 definition of the va_start macro (perhaps on the token for
9968 builtin) in a system header, so warnings will not be emitted.
9969 Use the location in real source code. */
9970 location_t current_location =
9971 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9972 NULL);
9974 if (!stdarg_p (fntype))
9976 error ("%<va_start%> used in function with fixed arguments");
9977 return true;
9980 if (va_start_p)
9982 if (va_start_p && (nargs != 2))
9984 error ("wrong number of arguments to function %<va_start%>");
9985 return true;
9987 arg = CALL_EXPR_ARG (exp, 1);
9989 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9990 when we checked the arguments and if needed issued a warning. */
9991 else
9993 if (nargs == 0)
9995 /* Evidently an out of date version of <stdarg.h>; can't validate
9996 va_start's second argument, but can still work as intended. */
9997 warning_at (current_location,
9998 OPT_Wvarargs,
9999 "%<__builtin_next_arg%> called without an argument");
10000 return true;
10002 else if (nargs > 1)
10004 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10005 return true;
10007 arg = CALL_EXPR_ARG (exp, 0);
10010 if (TREE_CODE (arg) == SSA_NAME
10011 && SSA_NAME_VAR (arg))
10012 arg = SSA_NAME_VAR (arg);
10014 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10015 or __builtin_next_arg (0) the first time we see it, after checking
10016 the arguments and if needed issuing a warning. */
10017 if (!integer_zerop (arg))
10019 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10021 /* Strip off all nops for the sake of the comparison. This
10022 is not quite the same as STRIP_NOPS. It does more.
10023 We must also strip off INDIRECT_EXPR for C++ reference
10024 parameters. */
10025 while (CONVERT_EXPR_P (arg)
10026 || TREE_CODE (arg) == INDIRECT_REF)
10027 arg = TREE_OPERAND (arg, 0);
10028 if (arg != last_parm)
10030 /* FIXME: Sometimes with the tree optimizers we can get something that is
10031 not the last argument even though the user used the last
10032 argument. We just warn and set the arg to be the last
10033 argument so that we will get wrong-code because of
10034 it. */
10035 warning_at (current_location,
10036 OPT_Wvarargs,
10037 "second parameter of %<va_start%> not last named argument");
10040 /* Undefined by C99 7.15.1.4p4 (va_start):
10041 "If the parameter parmN is declared with the register storage
10042 class, with a function or array type, or with a type that is
10043 not compatible with the type that results after application of
10044 the default argument promotions, the behavior is undefined."
10046 else if (DECL_REGISTER (arg))
10048 warning_at (current_location,
10049 OPT_Wvarargs,
10050 "undefined behavior when second parameter of "
10051 "%<va_start%> is declared with %<register%> storage");
10054 /* We want to verify the second parameter just once before the tree
10055 optimizers are run and then avoid keeping it in the tree,
10056 as otherwise we could warn even for correct code like:
10057 void foo (int i, ...)
10058 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10059 if (va_start_p)
10060 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10061 else
10062 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10064 return false;
10068 /* Expand a call EXP to __builtin_object_size. */
10070 static rtx
10071 expand_builtin_object_size (tree exp)
10073 tree ost;
10074 int object_size_type;
10075 tree fndecl = get_callee_fndecl (exp);
10077 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10079 error ("first argument of %qD must be a pointer, second integer constant",
10080 fndecl);
10081 expand_builtin_trap ();
10082 return const0_rtx;
10085 ost = CALL_EXPR_ARG (exp, 1);
10086 STRIP_NOPS (ost);
10088 if (TREE_CODE (ost) != INTEGER_CST
10089 || tree_int_cst_sgn (ost) < 0
10090 || compare_tree_int (ost, 3) > 0)
10092 error ("last argument of %qD is not integer constant between 0 and 3",
10093 fndecl);
10094 expand_builtin_trap ();
10095 return const0_rtx;
10098 object_size_type = tree_to_shwi (ost);
10100 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10103 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10104 FCODE is the BUILT_IN_* to use.
10105 Return NULL_RTX if we failed; the caller should emit a normal call,
10106 otherwise try to get the result in TARGET, if convenient (and in
10107 mode MODE if that's convenient). */
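/* Illustrative effect: a checked call such as
     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0))
   is expanded as a plain memcpy (d, s, 16) when the size argument is a
   known constant that the constant length does not exceed, or is the
   "unknown object size" value of (size_t) -1.  */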
10109 static rtx
10110 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10111 enum built_in_function fcode)
10113 if (!validate_arglist (exp,
10114 POINTER_TYPE,
10115 fcode == BUILT_IN_MEMSET_CHK
10116 ? INTEGER_TYPE : POINTER_TYPE,
10117 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10118 return NULL_RTX;
10120 tree dest = CALL_EXPR_ARG (exp, 0);
10121 tree src = CALL_EXPR_ARG (exp, 1);
10122 tree len = CALL_EXPR_ARG (exp, 2);
10123 tree size = CALL_EXPR_ARG (exp, 3);
10125 /* FIXME: Set access mode to write only for memset et al. */
10126 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10127 /*srcstr=*/NULL_TREE, size, access_read_write);
10129 if (!tree_fits_uhwi_p (size))
10130 return NULL_RTX;
10132 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10134 /* Avoid transforming the checking call to an ordinary one when
10135 an overflow has been detected or when the call couldn't be
10136 validated because the size is not constant. */
10137 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10138 return NULL_RTX;
10140 tree fn = NULL_TREE;
10141 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10142 mem{cpy,pcpy,move,set} is available. */
10143 switch (fcode)
10145 case BUILT_IN_MEMCPY_CHK:
10146 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10147 break;
10148 case BUILT_IN_MEMPCPY_CHK:
10149 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10150 break;
10151 case BUILT_IN_MEMMOVE_CHK:
10152 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10153 break;
10154 case BUILT_IN_MEMSET_CHK:
10155 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10156 break;
10157 default:
10158 break;
10161 if (! fn)
10162 return NULL_RTX;
10164 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10165 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10166 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10167 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10169 else if (fcode == BUILT_IN_MEMSET_CHK)
10170 return NULL_RTX;
10171 else
10173 unsigned int dest_align = get_pointer_alignment (dest);
10175 /* If DEST is not a pointer type, call the normal function. */
10176 if (dest_align == 0)
10177 return NULL_RTX;
10179 /* If SRC and DEST are the same (and not volatile), do nothing. */
10180 if (operand_equal_p (src, dest, 0))
10182 tree expr;
10184 if (fcode != BUILT_IN_MEMPCPY_CHK)
10186 /* Evaluate and ignore LEN in case it has side-effects. */
10187 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10188 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10191 expr = fold_build_pointer_plus (dest, len);
10192 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10195 /* __memmove_chk special case. */
10196 if (fcode == BUILT_IN_MEMMOVE_CHK)
10198 unsigned int src_align = get_pointer_alignment (src);
10200 if (src_align == 0)
10201 return NULL_RTX;
10203 /* If src is categorized for a readonly section we can use
10204 normal __memcpy_chk. */
10205 if (readonly_data_expr (src))
10207 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10208 if (!fn)
10209 return NULL_RTX;
10210 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10211 dest, src, len, size);
10212 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10213 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10214 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10217 return NULL_RTX;
10221 /* Emit warning if a buffer overflow is detected at compile time. */
10223 static void
10224 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10226 /* The source string. */
10227 tree srcstr = NULL_TREE;
10228 /* The size of the destination object returned by __builtin_object_size. */
10229 tree objsize = NULL_TREE;
10230 /* The string that is being concatenated with (as in __strcat_chk)
10231 or null if it isn't. */
10232 tree catstr = NULL_TREE;
10233 /* The maximum length of the source sequence in a bounded operation
10234 (such as __strncat_chk) or null if the operation isn't bounded
10235 (such as __strcat_chk). */
10236 tree maxread = NULL_TREE;
10237 /* The exact size of the access (such as in __strncpy_chk). */
10238 tree size = NULL_TREE;
10239 /* The access by the function that's checked. Except for snprintf
10240 both writing and reading is checked. */
10241 access_mode mode = access_read_write;
10243 switch (fcode)
10245 case BUILT_IN_STRCPY_CHK:
10246 case BUILT_IN_STPCPY_CHK:
10247 srcstr = CALL_EXPR_ARG (exp, 1);
10248 objsize = CALL_EXPR_ARG (exp, 2);
10249 break;
10251 case BUILT_IN_STRCAT_CHK:
10252 /* For __strcat_chk the warning will be emitted only if overflowing
10253 by at least strlen (dest) + 1 bytes. */
10254 catstr = CALL_EXPR_ARG (exp, 0);
10255 srcstr = CALL_EXPR_ARG (exp, 1);
10256 objsize = CALL_EXPR_ARG (exp, 2);
10257 break;
10259 case BUILT_IN_STRNCAT_CHK:
10260 catstr = CALL_EXPR_ARG (exp, 0);
10261 srcstr = CALL_EXPR_ARG (exp, 1);
10262 maxread = CALL_EXPR_ARG (exp, 2);
10263 objsize = CALL_EXPR_ARG (exp, 3);
10264 break;
10266 case BUILT_IN_STRNCPY_CHK:
10267 case BUILT_IN_STPNCPY_CHK:
10268 srcstr = CALL_EXPR_ARG (exp, 1);
10269 size = CALL_EXPR_ARG (exp, 2);
10270 objsize = CALL_EXPR_ARG (exp, 3);
10271 break;
10273 case BUILT_IN_SNPRINTF_CHK:
10274 case BUILT_IN_VSNPRINTF_CHK:
10275 maxread = CALL_EXPR_ARG (exp, 1);
10276 objsize = CALL_EXPR_ARG (exp, 3);
10277 /* The only checked access is the write to the destination. */
10278 mode = access_write_only;
10279 break;
10280 default:
10281 gcc_unreachable ();
10284 if (catstr && maxread)
10286 /* Check __strncat_chk. There is no way to determine the length
10287 of the string to which the source string is being appended so
10288 just warn when the length of the source string is not known. */
10289 check_strncat_sizes (exp, objsize);
10290 return;
10293 check_access (exp, size, maxread, srcstr, objsize, mode);
10296 /* Emit warning if a buffer overflow is detected at compile time
10297 in __sprintf_chk/__vsprintf_chk calls. */
10299 static void
10300 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10302 tree size, len, fmt;
10303 const char *fmt_str;
10304 int nargs = call_expr_nargs (exp);
10306 /* Verify the required arguments in the original call. */
10308 if (nargs < 4)
10309 return;
10310 size = CALL_EXPR_ARG (exp, 2);
10311 fmt = CALL_EXPR_ARG (exp, 3);
10313 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10314 return;
10316 /* Check whether the format is a literal string constant. */
10317 fmt_str = c_getstr (fmt);
10318 if (fmt_str == NULL)
10319 return;
10321 if (!init_target_chars ())
10322 return;
10324 /* If the format doesn't contain % args or %%, we know its size. */
10325 if (strchr (fmt_str, target_percent) == 0)
10326 len = build_int_cstu (size_type_node, strlen (fmt_str));
10327 /* If the format is "%s" and first ... argument is a string literal,
10328 we know it too. */
10329 else if (fcode == BUILT_IN_SPRINTF_CHK
10330 && strcmp (fmt_str, target_percent_s) == 0)
10332 tree arg;
10334 if (nargs < 5)
10335 return;
10336 arg = CALL_EXPR_ARG (exp, 4);
10337 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10338 return;
10340 len = c_strlen (arg, 1);
10341 if (!len || ! tree_fits_uhwi_p (len))
10342 return;
10344 else
10345 return;
10347 /* Add one for the terminating nul. */
10348 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10350 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10351 access_write_only);
10354 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10355 if possible. */
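/* E.g. for "char buf[64];" a call __builtin_object_size (&buf[8], 0)
   folds here to (size_t) 56, while a pointer whose target is not yet
   known is left for later passes to resolve.  */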
10357 static tree
10358 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10360 tree bytes;
10361 int object_size_type;
10363 if (!validate_arg (ptr, POINTER_TYPE)
10364 || !validate_arg (ost, INTEGER_TYPE))
10365 return NULL_TREE;
10367 STRIP_NOPS (ost);
10369 if (TREE_CODE (ost) != INTEGER_CST
10370 || tree_int_cst_sgn (ost) < 0
10371 || compare_tree_int (ost, 3) > 0)
10372 return NULL_TREE;
10374 object_size_type = tree_to_shwi (ost);
10376 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10377 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10378 and (size_t) 0 for types 2 and 3. */
10379 if (TREE_SIDE_EFFECTS (ptr))
10380 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10382 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10383 object_size_type |= OST_DYNAMIC;
10385 if (TREE_CODE (ptr) == ADDR_EXPR)
10387 compute_builtin_object_size (ptr, object_size_type, &bytes);
10388 if ((object_size_type & OST_DYNAMIC)
10389 || int_fits_type_p (bytes, size_type_node))
10390 return fold_convert (size_type_node, bytes);
10392 else if (TREE_CODE (ptr) == SSA_NAME)
10394 /* If object size is not known yet, delay folding until
10395 later. Maybe subsequent passes will help determining
10396 it. */
10397 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10398 && ((object_size_type & OST_DYNAMIC)
10399 || int_fits_type_p (bytes, size_type_node)))
10400 return fold_convert (size_type_node, bytes);
10403 return NULL_TREE;
10406 /* Builtins with folding operations that operate on "..." arguments
10407 need special handling; we need to store the arguments in a convenient
10408 data structure before attempting any folding. Fortunately there are
10409 only a few builtins that fall into this category. FNDECL is the
10410 function, EXP is the CALL_EXPR for the call. */
10412 static tree
10413 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10415 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10416 tree ret = NULL_TREE;
10418 switch (fcode)
10420 case BUILT_IN_FPCLASSIFY:
10421 ret = fold_builtin_fpclassify (loc, args, nargs);
10422 break;
10424 default:
10425 break;
10427 if (ret)
10429 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10430 SET_EXPR_LOCATION (ret, loc);
10431 suppress_warning (ret);
10432 return ret;
10434 return NULL_TREE;
10437 /* Initialize format string characters in the target charset. */
10439 bool
10440 init_target_chars (void)
10442 static bool init;
10443 if (!init)
10445 target_newline = lang_hooks.to_target_charset ('\n');
10446 target_percent = lang_hooks.to_target_charset ('%');
10447 target_c = lang_hooks.to_target_charset ('c');
10448 target_s = lang_hooks.to_target_charset ('s');
10449 if (target_newline == 0 || target_percent == 0 || target_c == 0
10450 || target_s == 0)
10451 return false;
10453 target_percent_c[0] = target_percent;
10454 target_percent_c[1] = target_c;
10455 target_percent_c[2] = '\0';
10457 target_percent_s[0] = target_percent;
10458 target_percent_s[1] = target_s;
10459 target_percent_s[2] = '\0';
10461 target_percent_s_newline[0] = target_percent;
10462 target_percent_s_newline[1] = target_s;
10463 target_percent_s_newline[2] = target_newline;
10464 target_percent_s_newline[3] = '\0';
10466 init = true;
10468 return true;
10471 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10472 and no overflow/underflow occurred. INEXACT is true if M was not
10473 exactly calculated. TYPE is the tree type for the result. This
10474 function assumes that you cleared the MPFR flags and then
10475 calculated M to see if anything subsequently set a flag prior to
10476 entering this function. Return NULL_TREE if any checks fail. */
10478 static tree
10479 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10481 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10482 overflow/underflow occurred. If -frounding-math, proceed iff the
10483 result of calling FUNC was exact. */
10484 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10485 && (!flag_rounding_math || !inexact))
10487 REAL_VALUE_TYPE rr;
10489 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10490 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10491 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10492 but the mpfr_t is not, then we underflowed in the
10493 conversion. */
10494 if (real_isfinite (&rr)
10495 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10497 REAL_VALUE_TYPE rmode;
10499 real_convert (&rmode, TYPE_MODE (type), &rr);
10500 /* Proceed iff the specified mode can hold the value. */
10501 if (real_identical (&rmode, &rr))
10502 return build_real (type, rmode);
10505 return NULL_TREE;
10508 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10509 number and no overflow/underflow occurred. INEXACT is true if M
10510 was not exactly calculated. TYPE is the tree type for the result.
10511 This function assumes that you cleared the MPFR flags and then
10512 calculated M to see if anything subsequently set a flag prior to
10513 entering this function. Return NULL_TREE if any checks fail, if
10514 FORCE_CONVERT is true, then bypass the checks. */
10516 static tree
10517 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10519 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10520 overflow/underflow occurred. If -frounding-math, proceed iff the
10521 result of calling FUNC was exact. */
10522 if (force_convert
10523 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10524 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10525 && (!flag_rounding_math || !inexact)))
10527 REAL_VALUE_TYPE re, im;
10529 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10530 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10531 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10532 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10533 but the mpfr_t is not, then we underflowed in the
10534 conversion. */
10535 if (force_convert
10536 || (real_isfinite (&re) && real_isfinite (&im)
10537 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10538 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10540 REAL_VALUE_TYPE re_mode, im_mode;
10542 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10543 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10544 /* Proceed iff the specified mode can hold the value. */
10545 if (force_convert
10546 || (real_identical (&re_mode, &re)
10547 && real_identical (&im_mode, &im)))
10548 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10549 build_real (TREE_TYPE (type), im_mode));
10552 return NULL_TREE;
10555 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10556 the pointer *(ARG_QUO) and return the result. The type is taken
10557 from the type of ARG0 and is used for setting the precision of the
10558 calculation and results. */
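/* Illustrative sketch: with constant arguments, remquo (5.0, 2.0, &q)
   folds to a COMPOUND_EXPR that stores 2 into *q and yields the
   remainder 1.0 (5/2 rounded to nearest is 2, and 5 - 2*2 == 1).  */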
10560 static tree
10561 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10563 tree const type = TREE_TYPE (arg0);
10564 tree result = NULL_TREE;
10566 STRIP_NOPS (arg0);
10567 STRIP_NOPS (arg1);
10569 /* To proceed, MPFR must exactly represent the target floating point
10570 format, which only happens when the target base equals two. */
10571 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10572 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10573 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10575 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10576 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10578 if (real_isfinite (ra0) && real_isfinite (ra1))
10580 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10581 const int prec = fmt->p;
10582 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10583 tree result_rem;
10584 long integer_quo;
10585 mpfr_t m0, m1;
10587 mpfr_inits2 (prec, m0, m1, NULL);
10588 mpfr_from_real (m0, ra0, MPFR_RNDN);
10589 mpfr_from_real (m1, ra1, MPFR_RNDN);
10590 mpfr_clear_flags ();
10591 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10592 /* Remquo is independent of the rounding mode, so pass
10593 inexact=0 to do_mpfr_ckconv(). */
10594 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10595 mpfr_clears (m0, m1, NULL);
10596 if (result_rem)
10598 /* MPFR calculates quo in the host's long so it may
10599 return more bits in quo than the target int can hold
10600 if sizeof(host long) > sizeof(target int). This can
10601 happen even for native compilers in LP64 mode. In
10602 these cases, modulo the quo value with the largest
10603 number that the target int can hold while leaving one
10604 bit for the sign. */
10605 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10606 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10608 /* Dereference the quo pointer argument. */
10609 arg_quo = build_fold_indirect_ref (arg_quo);
10610 /* Proceed iff a valid pointer type was passed in. */
10611 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10613 /* Set the value. */
10614 tree result_quo
10615 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10616 build_int_cst (TREE_TYPE (arg_quo),
10617 integer_quo));
10618 TREE_SIDE_EFFECTS (result_quo) = 1;
10619 /* Combine the quo assignment with the rem. */
10620 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10621 result_quo, result_rem));
10626 return result;
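/* For example, with both arguments constant, folding
   __builtin_remquo (5.0, 3.0, &q) yields a COMPOUND_EXPR roughly of the
   form (*q = 2, -1.0): 5/3 rounds to the nearest integer 2, and the
   corresponding remainder is 5 - 2*3 = -1.  */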
10629 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10630 resulting value as a tree with type TYPE. The mpfr precision is
10631 set to the precision of TYPE. We assume that this mpfr function
10632 returns zero if the result could be calculated exactly within the
10633 requested precision. In addition, the integer pointer represented
10634 by ARG_SG will be dereferenced and set to the appropriate signgam
10635 (-1,1) value. */
10637 static tree
10638 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10640 tree result = NULL_TREE;
10642 STRIP_NOPS (arg);
10644 /* To proceed, MPFR must exactly represent the target floating point
10645 format, which only happens when the target base equals two. Also
10646 verify ARG is a constant and that ARG_SG is an int pointer. */
10647 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10648 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10649 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10650 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10652 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10654 /* In addition to NaN and Inf, the argument cannot be zero or a
10655 negative integer. */
10656 if (real_isfinite (ra)
10657 && ra->cl != rvc_zero
10658 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10660 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10661 const int prec = fmt->p;
10662 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
10663 int inexact, sg;
10664 mpfr_t m;
10665 tree result_lg;
10667 mpfr_init2 (m, prec);
10668 mpfr_from_real (m, ra, MPFR_RNDN);
10669 mpfr_clear_flags ();
10670 inexact = mpfr_lgamma (m, &sg, m, rnd);
10671 result_lg = do_mpfr_ckconv (m, type, inexact);
10672 mpfr_clear (m);
10673 if (result_lg)
10675 tree result_sg;
10677 /* Dereference the arg_sg pointer argument. */
10678 arg_sg = build_fold_indirect_ref (arg_sg);
10679 /* Assign the signgam value into *arg_sg. */
10680 result_sg = fold_build2 (MODIFY_EXPR,
10681 TREE_TYPE (arg_sg), arg_sg,
10682 build_int_cst (TREE_TYPE (arg_sg), sg));
10683 TREE_SIDE_EFFECTS (result_sg) = 1;
10684 /* Combine the signgam assignment with the lgamma result. */
10685 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10686 result_sg, result_lg));
10691 return result;
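/* For example, folding __builtin_lgamma_r (-0.5, &sg) with a constant
   argument yields roughly (*sg = -1, 1.2655...): Gamma(-0.5) = -2*sqrt(pi)
   is negative, so the stored signgam value is -1 and the folded result is
   log|Gamma(-0.5)| = log(2*sqrt(pi)).  */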
10694 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
10695    mpc function FUNC on them and return the resulting value as a tree
10696 with type TYPE. The mpfr precision is set to the precision of
10697 TYPE. We assume that function FUNC returns zero if the result
10698 could be calculated exactly within the requested precision. If
10699 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10700 in the arguments and/or results. */
10702 tree
10703 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10704 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10706 tree result = NULL_TREE;
10708 STRIP_NOPS (arg0);
10709 STRIP_NOPS (arg1);
10711 /* To proceed, MPFR must exactly represent the target floating point
10712 format, which only happens when the target base equals two. */
10713 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10714 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10715 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10716 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10717 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10719 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10720 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10721 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10722 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10724 if (do_nonfinite
10725 || (real_isfinite (re0) && real_isfinite (im0)
10726 && real_isfinite (re1) && real_isfinite (im1)))
10728 const struct real_format *const fmt =
10729 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10730 const int prec = fmt->p;
10731 const mpfr_rnd_t rnd = fmt->round_towards_zero
10732 ? MPFR_RNDZ : MPFR_RNDN;
10733 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10734 int inexact;
10735 mpc_t m0, m1;
10737 mpc_init2 (m0, prec);
10738 mpc_init2 (m1, prec);
10739 mpfr_from_real (mpc_realref (m0), re0, rnd);
10740 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10741 mpfr_from_real (mpc_realref (m1), re1, rnd);
10742 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10743 mpfr_clear_flags ();
10744 inexact = func (m0, m0, m1, crnd);
10745 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10746 mpc_clear (m0);
10747 mpc_clear (m1);
10751 return result;
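/* As a standalone illustration of the MPC calling pattern above (this
   sketch is not part of GCC; it assumes libmpc and its headers are
   available and is linked with -lmpc -lmpfr -lgmp):

     #include <stdio.h>
     #include <mpc.h>

     int
     main (void)
     {
       mpc_t a, b;
       mpc_init2 (a, 53);   // precision of an IEEE double
       mpc_init2 (b, 53);
       mpc_set_d_d (a, 1.0, 1.0, MPC_RNDNN);   // a = 1 + 1i
       mpc_set_d_d (b, 2.0, 0.0, MPC_RNDNN);   // b = 2 + 0i
       int inexact = mpc_pow (a, a, b, MPC_RNDNN);   // a = (1+1i)^2 = 2i
       printf ("%g %+gi, inexact = %d\n",
               mpfr_get_d (mpc_realref (a), MPFR_RNDN),
               mpfr_get_d (mpc_imagref (a), MPFR_RNDN), inexact);
       mpc_clear (a);
       mpc_clear (b);
       return 0;
     }

   do_mpc_arg2 follows the same init/compute/check/clear sequence, but reads
   its operands from GCC trees and converts the MPC result back into a
   COMPLEX_CST via do_mpc_ckconv.  */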
10754 /* A wrapper function for builtin folding that prevents warnings for
10755    "statement without effect" and the like, caused by the call node
10756    being removed before the warning is generated.  */
10758 tree
10759 fold_call_stmt (gcall *stmt, bool ignore)
10761 tree ret = NULL_TREE;
10762 tree fndecl = gimple_call_fndecl (stmt);
10763 location_t loc = gimple_location (stmt);
10764 if (fndecl && fndecl_built_in_p (fndecl)
10765 && !gimple_call_va_arg_pack_p (stmt))
10767 int nargs = gimple_call_num_args (stmt);
10768 tree *args = (nargs > 0
10769 ? gimple_call_arg_ptr (stmt, 0)
10770 : &error_mark_node);
10772 if (avoid_folding_inline_builtin (fndecl))
10773 return NULL_TREE;
10774 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10776 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10778 else
10780 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10781 if (ret)
10783 /* Propagate location information from original call to
10784 expansion of builtin. Otherwise things like
10785 maybe_emit_chk_warning, that operate on the expansion
10786 of a builtin, will use the wrong location information. */
10787 if (gimple_has_location (stmt))
10789 tree realret = ret;
10790 if (TREE_CODE (ret) == NOP_EXPR)
10791 realret = TREE_OPERAND (ret, 0);
10792 if (CAN_HAVE_LOCATION_P (realret)
10793 && !EXPR_HAS_LOCATION (realret))
10794 SET_EXPR_LOCATION (realret, loc);
10795 return realret;
10797 return ret;
10801 return NULL_TREE;
10804 /* Look up the function in builtin_decl that corresponds to DECL
10805 and set ASMSPEC as its user assembler name. DECL must be a
10806 function decl that declares a builtin. */
10808 void
10809 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10811 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10812 && asmspec != 0);
10814 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10815 set_user_assembler_name (builtin, asmspec);
10817 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10818 && INT_TYPE_SIZE < BITS_PER_WORD)
10820 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10821 set_user_assembler_libfunc ("ffs", asmspec);
10822 set_optab_libfunc (ffs_optab, mode, "ffs");
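/* For example, a user-level assembler rename of the form

     extern int ffs (int) __asm__ ("my_ffs");

   ends up here: the builtin decl is given the user assembler name, and for
   ffs on targets where int is narrower than a word the ffs optab libfunc is
   redirected as well, so that a libcall expansion of __builtin_ffs also
   uses the renamed symbol.  */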
10826 /* Return true if DECL is a builtin that expands to a constant or similarly
10827 simple code. */
10828 bool
10829 is_simple_builtin (tree decl)
10831 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10832 switch (DECL_FUNCTION_CODE (decl))
10834 /* Builtins that expand to constants. */
10835 case BUILT_IN_CONSTANT_P:
10836 case BUILT_IN_EXPECT:
10837 case BUILT_IN_OBJECT_SIZE:
10838 case BUILT_IN_UNREACHABLE:
10839 /* Simple register moves or loads from stack. */
10840 case BUILT_IN_ASSUME_ALIGNED:
10841 case BUILT_IN_RETURN_ADDRESS:
10842 case BUILT_IN_EXTRACT_RETURN_ADDR:
10843 case BUILT_IN_FROB_RETURN_ADDR:
10844 case BUILT_IN_RETURN:
10845 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10846 case BUILT_IN_FRAME_ADDRESS:
10847 case BUILT_IN_VA_END:
10848 case BUILT_IN_STACK_SAVE:
10849 case BUILT_IN_STACK_RESTORE:
10850 case BUILT_IN_DWARF_CFA:
10851 /* Exception state returns or moves registers around. */
10852 case BUILT_IN_EH_FILTER:
10853 case BUILT_IN_EH_POINTER:
10854 case BUILT_IN_EH_COPY_VALUES:
10855 return true;
10857 default:
10858 return false;
10861 return false;
10864 /* Return true if DECL is a builtin that is not expensive, i.e. one that
10865    is most probably expanded inline into reasonably simple code.  This is
10866    a superset of is_simple_builtin.  */
10867 bool
10868 is_inexpensive_builtin (tree decl)
10870 if (!decl)
10871 return false;
10872 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10873 return true;
10874 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10875 switch (DECL_FUNCTION_CODE (decl))
10877 case BUILT_IN_ABS:
10878 CASE_BUILT_IN_ALLOCA:
10879 case BUILT_IN_BSWAP16:
10880 case BUILT_IN_BSWAP32:
10881 case BUILT_IN_BSWAP64:
10882 case BUILT_IN_BSWAP128:
10883 case BUILT_IN_CLZ:
10884 case BUILT_IN_CLZIMAX:
10885 case BUILT_IN_CLZL:
10886 case BUILT_IN_CLZLL:
10887 case BUILT_IN_CTZ:
10888 case BUILT_IN_CTZIMAX:
10889 case BUILT_IN_CTZL:
10890 case BUILT_IN_CTZLL:
10891 case BUILT_IN_FFS:
10892 case BUILT_IN_FFSIMAX:
10893 case BUILT_IN_FFSL:
10894 case BUILT_IN_FFSLL:
10895 case BUILT_IN_IMAXABS:
10896 case BUILT_IN_FINITE:
10897 case BUILT_IN_FINITEF:
10898 case BUILT_IN_FINITEL:
10899 case BUILT_IN_FINITED32:
10900 case BUILT_IN_FINITED64:
10901 case BUILT_IN_FINITED128:
10902 case BUILT_IN_FPCLASSIFY:
10903 case BUILT_IN_ISFINITE:
10904 case BUILT_IN_ISINF_SIGN:
10905 case BUILT_IN_ISINF:
10906 case BUILT_IN_ISINFF:
10907 case BUILT_IN_ISINFL:
10908 case BUILT_IN_ISINFD32:
10909 case BUILT_IN_ISINFD64:
10910 case BUILT_IN_ISINFD128:
10911 case BUILT_IN_ISNAN:
10912 case BUILT_IN_ISNANF:
10913 case BUILT_IN_ISNANL:
10914 case BUILT_IN_ISNAND32:
10915 case BUILT_IN_ISNAND64:
10916 case BUILT_IN_ISNAND128:
10917 case BUILT_IN_ISNORMAL:
10918 case BUILT_IN_ISGREATER:
10919 case BUILT_IN_ISGREATEREQUAL:
10920 case BUILT_IN_ISLESS:
10921 case BUILT_IN_ISLESSEQUAL:
10922 case BUILT_IN_ISLESSGREATER:
10923 case BUILT_IN_ISUNORDERED:
10924 case BUILT_IN_VA_ARG_PACK:
10925 case BUILT_IN_VA_ARG_PACK_LEN:
10926 case BUILT_IN_VA_COPY:
10927 case BUILT_IN_TRAP:
10928 case BUILT_IN_SAVEREGS:
10929 case BUILT_IN_POPCOUNTL:
10930 case BUILT_IN_POPCOUNTLL:
10931 case BUILT_IN_POPCOUNTIMAX:
10932 case BUILT_IN_POPCOUNT:
10933 case BUILT_IN_PARITYL:
10934 case BUILT_IN_PARITYLL:
10935 case BUILT_IN_PARITYIMAX:
10936 case BUILT_IN_PARITY:
10937 case BUILT_IN_LABS:
10938 case BUILT_IN_LLABS:
10939 case BUILT_IN_PREFETCH:
10940 case BUILT_IN_ACC_ON_DEVICE:
10941 return true;
10943 default:
10944 return is_simple_builtin (decl);
10947 return false;
10950 /* Return true if T is a constant and the value cast to a target char
10951    can be represented by a host char.
10952    Store the cast char constant in *P if so.  */
10954 bool
10955 target_char_cst_p (tree t, char *p)
10957 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10958 return false;
10960 *p = (char)tree_to_uhwi (t);
10961 return true;
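/* For example, the strchr folder in gimple-fold.cc uses this to obtain the
   character to search for as a host char, roughly:

     char c;
     if (!target_char_cst_p (arg, &c))
       return false;
     const char *r = strchr (string_constant_ptr, c);

   where string_constant_ptr stands for the target string constant already
   extracted as a host string.  */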
10964 /* Return true if the builtin DECL is implemented in a standard library.
10965    Otherwise return false, which does not guarantee that it is not (the
10966    list of handled builtins below may be incomplete).  */
10968 bool
10969 builtin_with_linkage_p (tree decl)
10971 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10972 switch (DECL_FUNCTION_CODE (decl))
10974 CASE_FLT_FN (BUILT_IN_ACOS):
10975 CASE_FLT_FN (BUILT_IN_ACOSH):
10976 CASE_FLT_FN (BUILT_IN_ASIN):
10977 CASE_FLT_FN (BUILT_IN_ASINH):
10978 CASE_FLT_FN (BUILT_IN_ATAN):
10979 CASE_FLT_FN (BUILT_IN_ATANH):
10980 CASE_FLT_FN (BUILT_IN_ATAN2):
10981 CASE_FLT_FN (BUILT_IN_CBRT):
10982 CASE_FLT_FN (BUILT_IN_CEIL):
10983 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
10984 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10985 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
10986 CASE_FLT_FN (BUILT_IN_COS):
10987 CASE_FLT_FN (BUILT_IN_COSH):
10988 CASE_FLT_FN (BUILT_IN_ERF):
10989 CASE_FLT_FN (BUILT_IN_ERFC):
10990 CASE_FLT_FN (BUILT_IN_EXP):
10991 CASE_FLT_FN (BUILT_IN_EXP2):
10992 CASE_FLT_FN (BUILT_IN_EXPM1):
10993 CASE_FLT_FN (BUILT_IN_FABS):
10994 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10995 CASE_FLT_FN (BUILT_IN_FDIM):
10996 CASE_FLT_FN (BUILT_IN_FLOOR):
10997 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
10998 CASE_FLT_FN (BUILT_IN_FMA):
10999 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11000 CASE_FLT_FN (BUILT_IN_FMAX):
11001 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11002 CASE_FLT_FN (BUILT_IN_FMIN):
11003 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11004 CASE_FLT_FN (BUILT_IN_FMOD):
11005 CASE_FLT_FN (BUILT_IN_FREXP):
11006 CASE_FLT_FN (BUILT_IN_HYPOT):
11007 CASE_FLT_FN (BUILT_IN_ILOGB):
11008 CASE_FLT_FN (BUILT_IN_LDEXP):
11009 CASE_FLT_FN (BUILT_IN_LGAMMA):
11010 CASE_FLT_FN (BUILT_IN_LLRINT):
11011 CASE_FLT_FN (BUILT_IN_LLROUND):
11012 CASE_FLT_FN (BUILT_IN_LOG):
11013 CASE_FLT_FN (BUILT_IN_LOG10):
11014 CASE_FLT_FN (BUILT_IN_LOG1P):
11015 CASE_FLT_FN (BUILT_IN_LOG2):
11016 CASE_FLT_FN (BUILT_IN_LOGB):
11017 CASE_FLT_FN (BUILT_IN_LRINT):
11018 CASE_FLT_FN (BUILT_IN_LROUND):
11019 CASE_FLT_FN (BUILT_IN_MODF):
11020 CASE_FLT_FN (BUILT_IN_NAN):
11021 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11022 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11023 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11024 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11025 CASE_FLT_FN (BUILT_IN_POW):
11026 CASE_FLT_FN (BUILT_IN_REMAINDER):
11027 CASE_FLT_FN (BUILT_IN_REMQUO):
11028 CASE_FLT_FN (BUILT_IN_RINT):
11029 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11030 CASE_FLT_FN (BUILT_IN_ROUND):
11031 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11032 CASE_FLT_FN (BUILT_IN_SCALBLN):
11033 CASE_FLT_FN (BUILT_IN_SCALBN):
11034 CASE_FLT_FN (BUILT_IN_SIN):
11035 CASE_FLT_FN (BUILT_IN_SINH):
11036 CASE_FLT_FN (BUILT_IN_SINCOS):
11037 CASE_FLT_FN (BUILT_IN_SQRT):
11038 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11039 CASE_FLT_FN (BUILT_IN_TAN):
11040 CASE_FLT_FN (BUILT_IN_TANH):
11041 CASE_FLT_FN (BUILT_IN_TGAMMA):
11042 CASE_FLT_FN (BUILT_IN_TRUNC):
11043 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11044 return true;
11046 case BUILT_IN_STPCPY:
11047 case BUILT_IN_STPNCPY:
11048 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11049 by libiberty's stpcpy.c for MinGW targets so we need to return true
11050 in order to be able to build libiberty in LTO mode for them. */
11051 return true;
11053 default:
11054 break;
11056 return false;
11059 /* Return true if OFFRNG is bounded to a subrange of offset values
11060 valid for the largest possible object. */
11062 bool
11063 access_ref::offset_bounded () const
11065 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11066 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11067 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
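/* For example, with a 64-bit ptrdiff_t this accepts exactly those offset
   ranges contained in [-2^63, 2^63 - 1]; an offrng that may fall outside
   the range of ptrdiff_t is reported as not bounded.  */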
11070 /* Return the fnspec describing the known side effects of builtin CALLEE.
11071    See tree-ssa-structalias.cc:find_func_aliases
11072    for the list of builtins we might need to handle here.  */
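/* A note on the strings returned below: they are attr_fnspec encodings (see
   attr-fnspec.h for the authoritative description).  Roughly, the first
   character says which argument, if any, is returned ('1'-'4', 'm' for a
   fresh noalias pointer, '.' for unknown), the second character marks the
   function as const- or pure-like apart from the described accesses
   ('c'/'C', 'p'/'P', or ' '), and each following pair of characters
   describes one argument: how the pointed-to memory is accessed (e.g. 'R'
   only read, 'O' only written, 'W' read and written) and, in the second
   position, 't' for an access of the argument's type size or a digit naming
   the argument that supplies the access size.  */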
11074 attr_fnspec
11075 builtin_fnspec (tree callee)
11077 built_in_function code = DECL_FUNCTION_CODE (callee);
11079 switch (code)
11081 /* All the following functions read memory pointed to by
11082    their second argument and write memory pointed to by their first
11083    argument.
11084    strcat/strncat additionally read memory pointed to by the first
11085    argument.  */
11086 case BUILT_IN_STRCAT:
11087 case BUILT_IN_STRCAT_CHK:
11088 return "1cW 1 ";
11089 case BUILT_IN_STRNCAT:
11090 case BUILT_IN_STRNCAT_CHK:
11091 return "1cW 13";
11092 case BUILT_IN_STRCPY:
11093 case BUILT_IN_STRCPY_CHK:
11094 return "1cO 1 ";
11095 case BUILT_IN_STPCPY:
11096 case BUILT_IN_STPCPY_CHK:
11097 return ".cO 1 ";
11098 case BUILT_IN_STRNCPY:
11099 case BUILT_IN_MEMCPY:
11100 case BUILT_IN_MEMMOVE:
11101 case BUILT_IN_TM_MEMCPY:
11102 case BUILT_IN_TM_MEMMOVE:
11103 case BUILT_IN_STRNCPY_CHK:
11104 case BUILT_IN_MEMCPY_CHK:
11105 case BUILT_IN_MEMMOVE_CHK:
11106 return "1cO313";
11107 case BUILT_IN_MEMPCPY:
11108 case BUILT_IN_MEMPCPY_CHK:
11109 return ".cO313";
11110 case BUILT_IN_STPNCPY:
11111 case BUILT_IN_STPNCPY_CHK:
11112 return ".cO313";
11113 case BUILT_IN_BCOPY:
11114 return ".c23O3";
11115 case BUILT_IN_BZERO:
11116 return ".cO2";
11117 case BUILT_IN_MEMCMP:
11118 case BUILT_IN_MEMCMP_EQ:
11119 case BUILT_IN_BCMP:
11120 case BUILT_IN_STRNCMP:
11121 case BUILT_IN_STRNCMP_EQ:
11122 case BUILT_IN_STRNCASECMP:
11123 return ".cR3R3";
11125 /* The following functions read memory pointed to by their
11126 first argument. */
11127 CASE_BUILT_IN_TM_LOAD (1):
11128 CASE_BUILT_IN_TM_LOAD (2):
11129 CASE_BUILT_IN_TM_LOAD (4):
11130 CASE_BUILT_IN_TM_LOAD (8):
11131 CASE_BUILT_IN_TM_LOAD (FLOAT):
11132 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11133 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11134 CASE_BUILT_IN_TM_LOAD (M64):
11135 CASE_BUILT_IN_TM_LOAD (M128):
11136 CASE_BUILT_IN_TM_LOAD (M256):
11137 case BUILT_IN_TM_LOG:
11138 case BUILT_IN_TM_LOG_1:
11139 case BUILT_IN_TM_LOG_2:
11140 case BUILT_IN_TM_LOG_4:
11141 case BUILT_IN_TM_LOG_8:
11142 case BUILT_IN_TM_LOG_FLOAT:
11143 case BUILT_IN_TM_LOG_DOUBLE:
11144 case BUILT_IN_TM_LOG_LDOUBLE:
11145 case BUILT_IN_TM_LOG_M64:
11146 case BUILT_IN_TM_LOG_M128:
11147 case BUILT_IN_TM_LOG_M256:
11148 return ".cR ";
11150 case BUILT_IN_INDEX:
11151 case BUILT_IN_RINDEX:
11152 case BUILT_IN_STRCHR:
11153 case BUILT_IN_STRLEN:
11154 case BUILT_IN_STRRCHR:
11155 return ".cR ";
11156 case BUILT_IN_STRNLEN:
11157 return ".cR2";
11159 /* These read memory pointed to by the first argument.
11160 Allocating memory does not have any side-effects apart from
11161 being the definition point for the pointer.
11162 Unix98 specifies that errno is set on allocation failure. */
11163 case BUILT_IN_STRDUP:
11164 return "mCR ";
11165 case BUILT_IN_STRNDUP:
11166 return "mCR2";
11167 /* Allocating memory does not have any side-effects apart from
11168 being the definition point for the pointer. */
11169 case BUILT_IN_MALLOC:
11170 case BUILT_IN_ALIGNED_ALLOC:
11171 case BUILT_IN_CALLOC:
11172 case BUILT_IN_GOMP_ALLOC:
11173 return "mC";
11174 CASE_BUILT_IN_ALLOCA:
11175 return "mc";
11176 /* These read memory pointed to by the first argument with size
11177 in the third argument. */
11178 case BUILT_IN_MEMCHR:
11179 return ".cR3";
11180 /* These read memory pointed to by the first and second arguments. */
11181 case BUILT_IN_STRSTR:
11182 case BUILT_IN_STRPBRK:
11183 case BUILT_IN_STRCASECMP:
11184 case BUILT_IN_STRCSPN:
11185 case BUILT_IN_STRSPN:
11186 case BUILT_IN_STRCMP:
11187 case BUILT_IN_STRCMP_EQ:
11188 return ".cR R ";
11189 /* Freeing memory kills the pointed-to memory. More importantly
11190 the call has to serve as a barrier for moving loads and stores
11191 across it. */
11192 case BUILT_IN_STACK_RESTORE:
11193 case BUILT_IN_FREE:
11194 case BUILT_IN_GOMP_FREE:
11195 return ".co ";
11196 case BUILT_IN_VA_END:
11197 return ".cO ";
11198 /* Realloc serves both as allocation point and deallocation point. */
11199 case BUILT_IN_REALLOC:
11200 return ".Cw ";
11201 case BUILT_IN_GAMMA_R:
11202 case BUILT_IN_GAMMAF_R:
11203 case BUILT_IN_GAMMAL_R:
11204 case BUILT_IN_LGAMMA_R:
11205 case BUILT_IN_LGAMMAF_R:
11206 case BUILT_IN_LGAMMAL_R:
11207 return ".C. Ot";
11208 case BUILT_IN_FREXP:
11209 case BUILT_IN_FREXPF:
11210 case BUILT_IN_FREXPL:
11211 case BUILT_IN_MODF:
11212 case BUILT_IN_MODFF:
11213 case BUILT_IN_MODFL:
11214 return ".c. Ot";
11215 case BUILT_IN_REMQUO:
11216 case BUILT_IN_REMQUOF:
11217 case BUILT_IN_REMQUOL:
11218 return ".c. . Ot";
11219 case BUILT_IN_SINCOS:
11220 case BUILT_IN_SINCOSF:
11221 case BUILT_IN_SINCOSL:
11222 return ".c. OtOt";
11223 case BUILT_IN_MEMSET:
11224 case BUILT_IN_MEMSET_CHK:
11225 case BUILT_IN_TM_MEMSET:
11226 return "1cO3";
11227 CASE_BUILT_IN_TM_STORE (1):
11228 CASE_BUILT_IN_TM_STORE (2):
11229 CASE_BUILT_IN_TM_STORE (4):
11230 CASE_BUILT_IN_TM_STORE (8):
11231 CASE_BUILT_IN_TM_STORE (FLOAT):
11232 CASE_BUILT_IN_TM_STORE (DOUBLE):
11233 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11234 CASE_BUILT_IN_TM_STORE (M64):
11235 CASE_BUILT_IN_TM_STORE (M128):
11236 CASE_BUILT_IN_TM_STORE (M256):
11237 return ".cO ";
11238 case BUILT_IN_STACK_SAVE:
11239 case BUILT_IN_RETURN:
11240 case BUILT_IN_EH_POINTER:
11241 case BUILT_IN_EH_FILTER:
11242 case BUILT_IN_UNWIND_RESUME:
11243 case BUILT_IN_CXA_END_CLEANUP:
11244 case BUILT_IN_EH_COPY_VALUES:
11245 case BUILT_IN_FRAME_ADDRESS:
11246 case BUILT_IN_APPLY_ARGS:
11247 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11248 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11249 case BUILT_IN_PREFETCH:
11250 case BUILT_IN_DWARF_CFA:
11251 case BUILT_IN_RETURN_ADDRESS:
11252 return ".c";
11253 case BUILT_IN_ASSUME_ALIGNED:
11254 return "1cX ";
11255 /* But posix_memalign stores a pointer into the memory pointed to
11256 by its first argument. */
11257 case BUILT_IN_POSIX_MEMALIGN:
11258 return ".cOt";
11260 default:
11261 return "";