gcc/builtins.cc
1 /* Expand builtin functions.
2 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "gomp-constants.h"
75 #include "omp-general.h"
76 #include "tree-dfa.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
97 #include "builtins.def"
100 /* Set up an array of builtin_info_type; make sure each element's decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_issignaling (tree, rtx);
127 static rtx expand_builtin_int_roundingfn (tree, rtx);
128 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 static rtx expand_builtin_next_arg (void);
130 static rtx expand_builtin_va_start (tree);
131 static rtx expand_builtin_va_end (tree);
132 static rtx expand_builtin_va_copy (tree);
133 static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 static rtx expand_builtin_strcmp (tree, rtx);
135 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 static rtx expand_builtin_memcpy (tree, rtx);
137 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 rtx target, tree exp,
139 memop_ret retmode,
140 bool might_overlap);
141 static rtx expand_builtin_memmove (tree, rtx);
142 static rtx expand_builtin_mempcpy (tree, rtx);
143 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 static rtx expand_builtin_strcpy (tree, rtx);
145 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 static rtx expand_builtin_bzero (tree);
150 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 static rtx expand_builtin_alloca (tree);
153 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 static rtx expand_builtin_frame_address (tree, tree);
155 static tree stabilize_va_list_loc (location_t, tree, int);
156 static rtx expand_builtin_expect (tree, rtx);
157 static rtx expand_builtin_expect_with_probability (tree, rtx);
158 static tree fold_builtin_constant_p (tree);
159 static tree fold_builtin_classify_type (tree);
160 static tree fold_builtin_strlen (location_t, tree, tree, tree);
161 static tree fold_builtin_inf (location_t, tree, int);
162 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
163 static bool validate_arg (const_tree, enum tree_code code);
164 static rtx expand_builtin_fabs (tree, rtx, rtx);
165 static rtx expand_builtin_signbit (tree, rtx);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_iseqsig (location_t, tree, tree);
175 static tree fold_builtin_varargs (location_t, tree, tree*, int);
177 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
178 static tree fold_builtin_strspn (location_t, tree, tree, tree);
179 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
181 static rtx expand_builtin_object_size (tree);
182 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
183 enum built_in_function);
184 static void maybe_emit_chk_warning (tree, enum built_in_function);
185 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
186 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
188 unsigned HOST_WIDE_INT target_newline;
189 unsigned HOST_WIDE_INT target_percent;
190 static unsigned HOST_WIDE_INT target_c;
191 static unsigned HOST_WIDE_INT target_s;
192 char target_percent_c[3];
193 char target_percent_s[3];
194 char target_percent_s_newline[4];
195 static tree do_mpfr_remquo (tree, tree, tree);
196 static tree do_mpfr_lgamma_r (tree, tree, tree);
197 static void expand_builtin_sync_synchronize (void);
199 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
201 static bool
202 is_builtin_name (const char *name)
204 return (startswith (name, "__builtin_")
205 || startswith (name, "__sync_")
206 || startswith (name, "__atomic_"));
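/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" all satisfy is_builtin_name, whereas a plain
   "memcpy" does not.  */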
209 /* Return true if NODE should be considered for inline expansion regardless
210 of the optimization level. This means whenever a function is invoked with
211 its "internal" name, which normally contains the prefix "__builtin". */
213 bool
214 called_as_built_in (tree node)
216 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
217 we want the name used to call the function, not the name it
218 will have. */
219 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
220 return is_builtin_name (name);
223 /* Compute values M and N such that M divides (address of EXP - N) and such
224 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
225 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
226 *ALIGNP and any bit-offset to *BITPOSP.
228 Note that the address (and thus the alignment) computed here is based
229 on the address to which a symbol resolves, whereas DECL_ALIGN is based
230 on the address at which an object is actually located. These two
231 addresses are not always the same. For example, on ARM targets,
232 the address &foo of a Thumb function foo() has the lowest bit set,
233 whereas foo() itself starts on an even address.
235 If ADDR_P is true we are taking the address of the memory reference EXP
236 and thus cannot rely on the access taking place. */
238 bool
239 get_object_alignment_2 (tree exp, unsigned int *alignp,
240 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
242 poly_int64 bitsize, bitpos;
243 tree offset;
244 machine_mode mode;
245 int unsignedp, reversep, volatilep;
246 unsigned int align = BITS_PER_UNIT;
247 bool known_alignment = false;
249 /* Get the innermost object and the constant (bitpos) and possibly
250 variable (offset) offset of the access. */
251 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
252 &unsignedp, &reversep, &volatilep);
254 /* Extract alignment information from the innermost object and
255 possibly adjust bitpos and offset. */
256 if (TREE_CODE (exp) == FUNCTION_DECL)
258 /* Function addresses can encode extra information besides their
259 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
260 allows the low bit to be used as a virtual bit, we know
261 that the address itself must be at least 2-byte aligned. */
262 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
263 align = 2 * BITS_PER_UNIT;
265 else if (TREE_CODE (exp) == LABEL_DECL)
267 else if (TREE_CODE (exp) == CONST_DECL)
269 /* The alignment of a CONST_DECL is determined by its initializer. */
270 exp = DECL_INITIAL (exp);
271 align = TYPE_ALIGN (TREE_TYPE (exp));
272 if (CONSTANT_CLASS_P (exp))
273 align = targetm.constant_alignment (exp, align);
275 known_alignment = true;
277 else if (DECL_P (exp))
279 align = DECL_ALIGN (exp);
280 known_alignment = true;
282 else if (TREE_CODE (exp) == INDIRECT_REF
283 || TREE_CODE (exp) == MEM_REF
284 || TREE_CODE (exp) == TARGET_MEM_REF)
286 tree addr = TREE_OPERAND (exp, 0);
287 unsigned ptr_align;
288 unsigned HOST_WIDE_INT ptr_bitpos;
289 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
291 /* If the address is explicitly aligned, handle that. */
292 if (TREE_CODE (addr) == BIT_AND_EXPR
293 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
295 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
296 ptr_bitmask *= BITS_PER_UNIT;
297 align = least_bit_hwi (ptr_bitmask);
298 addr = TREE_OPERAND (addr, 0);
301 known_alignment
302 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
303 align = MAX (ptr_align, align);
305 /* Re-apply explicit alignment to the bitpos. */
306 ptr_bitpos &= ptr_bitmask;
308 /* The alignment of the pointer operand in a TARGET_MEM_REF
309 has to take the variable offset parts into account. */
310 if (TREE_CODE (exp) == TARGET_MEM_REF)
312 if (TMR_INDEX (exp))
314 unsigned HOST_WIDE_INT step = 1;
315 if (TMR_STEP (exp))
316 step = TREE_INT_CST_LOW (TMR_STEP (exp));
317 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
319 if (TMR_INDEX2 (exp))
320 align = BITS_PER_UNIT;
321 known_alignment = false;
324 /* When EXP is an actual memory reference then we can use
325 TYPE_ALIGN of a pointer indirection to derive alignment.
326 Do so only if get_pointer_alignment_1 did not reveal absolute
327 alignment knowledge and if using that alignment would
328 improve the situation. */
329 unsigned int talign;
330 if (!addr_p && !known_alignment
331 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
332 && talign > align)
333 align = talign;
334 else
336 /* Else adjust bitpos accordingly. */
337 bitpos += ptr_bitpos;
338 if (TREE_CODE (exp) == MEM_REF
339 || TREE_CODE (exp) == TARGET_MEM_REF)
340 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
343 else if (TREE_CODE (exp) == STRING_CST)
345 /* STRING_CSTs are the only constant objects we allow not to be
346 wrapped inside a CONST_DECL. */
347 align = TYPE_ALIGN (TREE_TYPE (exp));
348 if (CONSTANT_CLASS_P (exp))
349 align = targetm.constant_alignment (exp, align);
351 known_alignment = true;
354 /* If there is a non-constant offset part extract the maximum
355 alignment that can prevail. */
356 if (offset)
358 unsigned int trailing_zeros = tree_ctz (offset);
359 if (trailing_zeros < HOST_BITS_PER_INT)
361 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
362 if (inner)
363 align = MIN (align, inner);
367 /* Account for the alignment of runtime coefficients, so that the constant
368 bitpos is guaranteed to be accurate. */
369 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
370 if (alt_align != 0 && alt_align < align)
372 align = alt_align;
373 known_alignment = false;
376 *alignp = align;
377 *bitposp = bitpos.coeffs[0] & (align - 1);
378 return known_alignment;
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
390 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
391 with it. */
392 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
393 exp = TREE_OPERAND (exp, 0);
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
397 /* Return the alignment in bits of EXP, an object. */
399 unsigned int
400 get_object_alignment (tree exp)
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
405 get_object_alignment_1 (exp, &align, &bitpos);
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
410 if (bitpos != 0)
411 align = least_bit_hwi (bitpos);
412 return align;
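/* For instance, if get_object_alignment_1 reports align == 256 and
   bitpos == 40, the strongest alignment that still holds is
   least_bit_hwi (40) == 8 bits, i.e. only byte alignment can be
   guaranteed for the object's address.  */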
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
420 If EXP is not a pointer, false is returned too. */
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
426 STRIP_NOPS (exp);
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* Make sure to return a sensible alignment when the multiplication
464 by BITS_PER_UNIT overflowed. */
465 if (*alignp == 0)
466 *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 /* We cannot really tell whether this result is an approximation. */
468 return false;
470 else
472 *bitposp = 0;
473 *alignp = BITS_PER_UNIT;
474 return false;
477 else if (TREE_CODE (exp) == INTEGER_CST)
479 *alignp = BIGGEST_ALIGNMENT;
480 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 & (BIGGEST_ALIGNMENT - 1));
482 return true;
485 *bitposp = 0;
486 *alignp = BITS_PER_UNIT;
487 return false;
490 /* Return the alignment in bits of EXP, a pointer valued expression.
491 The alignment returned is, by default, the alignment of the thing that
492 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
494 Otherwise, look at the expression to see if we can do better, i.e., if the
495 expression is actually pointing at an object whose alignment is tighter. */
497 unsigned int
498 get_pointer_alignment (tree exp)
500 unsigned HOST_WIDE_INT bitpos = 0;
501 unsigned int align;
503 get_pointer_alignment_1 (exp, &align, &bitpos);
505 /* align and bitpos now specify known low bits of the pointer.
506 ptr & (align - 1) == bitpos. */
508 if (bitpos != 0)
509 align = least_bit_hwi (bitpos);
511 return align;
514 /* Return the number of leading non-zero elements in the sequence
515 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518 unsigned
519 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
521 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
523 unsigned n;
525 if (eltsize == 1)
527 /* Optimize the common case of plain char. */
528 for (n = 0; n < maxelts; n++)
530 const char *elt = (const char*) ptr + n;
531 if (!*elt)
532 break;
535 else
537 for (n = 0; n < maxelts; n++)
539 const char *elt = (const char*) ptr + n * eltsize;
540 if (!memcmp (elt, "\0\0\0\0", eltsize))
541 break;
544 return n;
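/* For example, string_length ("ab\0cd", 1, 5) returns 2.  With
   ELTSIZE == 2, a buffer holding the two-byte elements 'a', 'b', 0
   likewise yields 2, since each element is compared against an
   all-zero element of that size.  */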
547 /* Compute the length of a null-terminated character string or wide
548 character string handling character sizes of 1, 2, and 4 bytes.
549 TREE_STRING_LENGTH is not the right way because it evaluates to
550 the size of the character array in bytes (as opposed to characters)
551 and because it can contain a zero byte in the middle.
553 ONLY_VALUE should be nonzero if the result is not going to be emitted
554 into the instruction stream and zero if it is going to be expanded.
555 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 is returned, otherwise NULL, since
557 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
558 evaluate the side-effects.
560 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 accesses. Note that this implies the result is not going to be emitted
562 into the instruction stream.
564 Additional information about the string accessed may be recorded
565 in DATA. For example, if ARG references an unterminated string,
566 then the declaration will be stored in the DECL field. If the
567 length of the unterminated string can be determined, it'll be
568 stored in the LEN field. Note this length could well be different
569 than what a C strlen call would return.
571 ELTSIZE is 1 for normal single byte character strings, and 2 or
572 4 for wide character strings. ELTSIZE is by default 1.
574 The value returned is of type `ssizetype'. */
576 tree
577 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
579 /* If we were not passed a DATA pointer, then get one to a local
580 structure. That avoids having to check DATA for NULL before
581 each time we want to use it. */
582 c_strlen_data local_strlen_data = { };
583 if (!data)
584 data = &local_strlen_data;
586 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
588 tree src = STRIP_NOPS (arg);
589 if (TREE_CODE (src) == COND_EXPR
590 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
592 tree len1, len2;
594 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
595 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
596 if (tree_int_cst_equal (len1, len2))
597 return len1;
600 if (TREE_CODE (src) == COMPOUND_EXPR
601 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
602 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
604 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
606 /* Offset from the beginning of the string in bytes. */
607 tree byteoff;
608 tree memsize;
609 tree decl;
610 src = string_constant (src, &byteoff, &memsize, &decl);
611 if (src == 0)
612 return NULL_TREE;
614 /* Determine the size of the string element. */
615 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
616 return NULL_TREE;
618 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
619 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
620 in case the latter is less than the size of the array, such as when
621 SRC refers to a short string literal used to initialize a large array.
622 In that case, the elements of the array after the terminating NUL are
623 all NUL. */
624 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
625 strelts = strelts / eltsize;
627 if (!tree_fits_uhwi_p (memsize))
628 return NULL_TREE;
630 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
632 /* PTR can point to the byte representation of any string type, including
633 char* and wchar_t*. */
634 const char *ptr = TREE_STRING_POINTER (src);
636 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
638 /* The code below works only for single byte character types. */
639 if (eltsize != 1)
640 return NULL_TREE;
642 /* If the string has an internal NUL character followed by any
643 non-NUL characters (e.g., "foo\0bar"), we can't compute
644 the offset to the following NUL if we don't know where to
645 start searching for it. */
646 unsigned len = string_length (ptr, eltsize, strelts);
648 /* Return when an embedded null character is found or none at all.
649 In the latter case, set the DECL/LEN field in the DATA structure
650 so that callers may examine them. */
651 if (len + 1 < strelts)
652 return NULL_TREE;
653 else if (len >= maxelts)
655 data->decl = decl;
656 data->off = byteoff;
657 data->minlen = ssize_int (len);
658 return NULL_TREE;
661 /* For empty strings the result should be zero. */
662 if (len == 0)
663 return ssize_int (0);
665 /* We don't know the starting offset, but we do know that the string
666 has no internal zero bytes. If the offset falls within the bounds
667 of the string subtract the offset from the length of the string,
668 and return that. Otherwise the length is zero. Take care to
669 use SAVE_EXPR in case the OFFSET has side-effects. */
670 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
671 : byteoff;
672 offsave = fold_convert_loc (loc, sizetype, offsave);
673 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
674 size_int (len));
675 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
676 offsave);
677 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
678 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
679 build_zero_cst (ssizetype));
682 /* Offset from the beginning of the string in elements. */
683 HOST_WIDE_INT eltoff;
685 /* We have a known offset into the string. Start searching there for
686 a null character if we can represent it as a single HOST_WIDE_INT. */
687 if (byteoff == 0)
688 eltoff = 0;
689 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
690 eltoff = -1;
691 else
692 eltoff = tree_to_uhwi (byteoff) / eltsize;
694 /* If the offset is known to be out of bounds, warn, and call strlen at
695 runtime. */
696 if (eltoff < 0 || eltoff >= maxelts)
698 /* Suppress multiple warnings for propagated constant strings. */
699 if (only_value != 2
700 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
701 && warning_at (loc, OPT_Warray_bounds_,
702 "offset %qwi outside bounds of constant string",
703 eltoff))
705 if (decl)
706 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
707 suppress_warning (arg, OPT_Warray_bounds_);
709 return NULL_TREE;
712 /* If eltoff is larger than strelts but less than maxelts the
713 string length is zero, since the excess memory will be zero. */
714 if (eltoff > strelts)
715 return ssize_int (0);
717 /* Use strlen to search for the first zero byte. Since any strings
718 constructed with build_string will have nulls appended, we win even
719 if we get handed something like (char[4])"abcd".
721 Since ELTOFF is our starting index into the string, no further
722 calculation is needed. */
723 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
724 strelts - eltoff);
726 /* Don't know what to return if there was no zero termination.
727 Ideally this would turn into a gcc_checking_assert over time.
728 Set DECL/LEN so callers can examine them. */
729 if (len >= maxelts - eltoff)
731 data->decl = decl;
732 data->off = byteoff;
733 data->minlen = ssize_int (len);
734 return NULL_TREE;
737 return ssize_int (len);
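/* For example, for ARG referring to the string constant "hello",
   c_strlen returns ssize_int (5).  For "foo\0bar" with a non-constant
   byte offset it returns NULL_TREE, because the distance to the first
   NUL depends on where the search starts.  */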
740 /* Return a constant integer corresponding to target reading
741 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
742 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
743 are assumed to be zero, otherwise it reads as many characters
744 as needed. */
746 rtx
747 c_readstr (const char *str, scalar_int_mode mode,
748 bool null_terminated_p/*=true*/)
750 HOST_WIDE_INT ch;
751 unsigned int i, j;
752 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
754 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
755 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
756 / HOST_BITS_PER_WIDE_INT;
758 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
759 for (i = 0; i < len; i++)
760 tmp[i] = 0;
762 ch = 1;
763 for (i = 0; i < GET_MODE_SIZE (mode); i++)
765 j = i;
766 if (WORDS_BIG_ENDIAN)
767 j = GET_MODE_SIZE (mode) - i - 1;
768 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
769 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
770 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
771 j *= BITS_PER_UNIT;
773 if (ch || !null_terminated_p)
774 ch = (unsigned char) str[i];
775 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
778 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
779 return immed_wide_int_const (c, mode);
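/* For example, reading "ab" in a 4-byte integer mode produces the
   constant 0x00006261 on a little-endian target and 0x61620000 on a
   big-endian one; with NULL_TERMINATED_P (the default) every byte
   after the first '\0' is forced to zero.  */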
782 /* Cast a target constant CST to target CHAR and if that value fits into
783 host char type, return zero and put that value into variable pointed to by
784 P. */
786 static int
787 target_char_cast (tree cst, char *p)
789 unsigned HOST_WIDE_INT val, hostval;
791 if (TREE_CODE (cst) != INTEGER_CST
792 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
793 return 1;
795 /* Do not care if it fits or not right here. */
796 val = TREE_INT_CST_LOW (cst);
798 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
799 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
801 hostval = val;
802 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
803 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
805 if (val != hostval)
806 return 1;
808 *p = hostval;
809 return 0;
812 /* Similar to save_expr, but assumes that arbitrary code is not executed
813 in between the multiple evaluations. In particular, we assume that a
814 non-addressable local variable will not be modified. */
816 static tree
817 builtin_save_expr (tree exp)
819 if (TREE_CODE (exp) == SSA_NAME
820 || (TREE_ADDRESSABLE (exp) == 0
821 && (TREE_CODE (exp) == PARM_DECL
822 || (VAR_P (exp) && !TREE_STATIC (exp)))))
823 return exp;
825 return save_expr (exp);
828 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
829 times to get the address of either a higher stack frame, or a return
830 address located within it (depending on FNDECL_CODE). */
832 static rtx
833 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
835 int i;
836 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
837 if (tem == NULL_RTX)
839 /* For a zero count with __builtin_return_address, we don't care what
840 frame address we return, because target-specific definitions will
841 override us. Therefore frame pointer elimination is OK, and using
842 the soft frame pointer is OK.
844 For a nonzero count, or a zero count with __builtin_frame_address,
845 we require a stable offset from the current frame pointer to the
846 previous one, so we must use the hard frame pointer, and
847 we must disable frame pointer elimination. */
848 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
849 tem = frame_pointer_rtx;
850 else
852 tem = hard_frame_pointer_rtx;
854 /* Tell reload not to eliminate the frame pointer. */
855 crtl->accesses_prior_frames = 1;
859 if (count > 0)
860 SETUP_FRAME_ADDRESSES ();
862 /* On the SPARC, the return address is not in the frame, it is in a
863 register. There is no way to access it off of the current frame
864 pointer, but it can be accessed off the previous frame pointer by
865 reading the value from the register window save area. */
866 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
867 count--;
869 /* Scan back COUNT frames to the specified frame. */
870 for (i = 0; i < count; i++)
872 /* Assume the dynamic chain pointer is in the word that the
873 frame address points to, unless otherwise specified. */
874 tem = DYNAMIC_CHAIN_ADDRESS (tem);
875 tem = memory_address (Pmode, tem);
876 tem = gen_frame_mem (Pmode, tem);
877 tem = copy_to_reg (tem);
880 /* For __builtin_frame_address, return what we've got. But, on
881 the SPARC for example, we may have to add a bias. */
882 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
883 return FRAME_ADDR_RTX (tem);
885 /* For __builtin_return_address, get the return address from that frame. */
886 #ifdef RETURN_ADDR_RTX
887 tem = RETURN_ADDR_RTX (count, tem);
888 #else
889 tem = memory_address (Pmode,
890 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
891 tem = gen_frame_mem (Pmode, tem);
892 #endif
893 return tem;
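/* For example, __builtin_return_address (0) reaches this routine with
   COUNT == 0 and yields the current function's own return address,
   while __builtin_frame_address (2) follows the dynamic chain twice
   and then applies any target-specific FRAME_ADDR_RTX bias.  */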
896 /* Alias set used for setjmp buffer. */
897 static alias_set_type setjmp_alias_set = -1;
899 /* Construct the leading half of a __builtin_setjmp call. Control will
900 return to RECEIVER_LABEL. This is also called directly by the SJLJ
901 exception handling code. */
903 void
904 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
906 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
907 rtx stack_save;
908 rtx mem;
910 if (setjmp_alias_set == -1)
911 setjmp_alias_set = new_alias_set ();
913 buf_addr = convert_memory_address (Pmode, buf_addr);
915 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
917 /* We store the frame pointer and the address of receiver_label in
918 the buffer and use the rest of it for the stack save area, which
919 is machine-dependent. */
921 mem = gen_rtx_MEM (Pmode, buf_addr);
922 set_mem_alias_set (mem, setjmp_alias_set);
923 emit_move_insn (mem, hard_frame_pointer_rtx);
925 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
926 GET_MODE_SIZE (Pmode)));
927 set_mem_alias_set (mem, setjmp_alias_set);
929 emit_move_insn (validize_mem (mem),
930 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
932 stack_save = gen_rtx_MEM (sa_mode,
933 plus_constant (Pmode, buf_addr,
934 2 * GET_MODE_SIZE (Pmode)));
935 set_mem_alias_set (stack_save, setjmp_alias_set);
936 emit_stack_save (SAVE_NONLOCAL, &stack_save);
938 /* If there is further processing to do, do it. */
939 if (targetm.have_builtin_setjmp_setup ())
940 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
942 /* We have a nonlocal label. */
943 cfun->has_nonlocal_label = 1;
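/* In other words, the setjmp buffer set up above holds the hard frame
   pointer in word 0, the address of RECEIVER_LABEL in word 1, and the
   machine-dependent stack save area starting at word 2.  */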
946 /* Construct the trailing part of a __builtin_setjmp call. This is
947 also called directly by the SJLJ exception handling code.
948 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
950 void
951 expand_builtin_setjmp_receiver (rtx receiver_label)
953 rtx chain;
955 /* Mark the FP as used when we get here, so we have to make sure it's
956 marked as used by this function. */
957 emit_use (hard_frame_pointer_rtx);
959 /* Mark the static chain as clobbered here so life information
960 doesn't get messed up for it. */
961 chain = rtx_for_static_chain (current_function_decl, true);
962 if (chain && REG_P (chain))
963 emit_clobber (chain);
965 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
967 /* If the argument pointer can be eliminated in favor of the
968 frame pointer, we don't need to restore it. We assume here
969 that if such an elimination is present, it can always be used.
970 This is the case on all known machines; if we don't make this
971 assumption, we do unnecessary saving on many machines. */
972 size_t i;
973 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
975 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
976 if (elim_regs[i].from == ARG_POINTER_REGNUM
977 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
978 break;
980 if (i == ARRAY_SIZE (elim_regs))
982 /* Now restore our arg pointer from the address at which it
983 was saved in our stack frame. */
984 emit_move_insn (crtl->args.internal_arg_pointer,
985 copy_to_reg (get_arg_pointer_save_area ()));
989 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
990 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
991 else if (targetm.have_nonlocal_goto_receiver ())
992 emit_insn (targetm.gen_nonlocal_goto_receiver ());
993 else
994 { /* Nothing */ }
996 /* We must not allow the code we just generated to be reordered by
997 scheduling. Specifically, the update of the frame pointer must
998 happen immediately, not later. */
999 emit_insn (gen_blockage ());
1002 /* __builtin_longjmp is passed a pointer to an array of five words (not
1003 all will be used on all machines). It operates similarly to the C
1004 library function of the same name, but is more efficient. Much of
1005 the code below is copied from the handling of non-local gotos. */
1007 static void
1008 expand_builtin_longjmp (rtx buf_addr, rtx value)
1010 rtx fp, lab, stack;
1011 rtx_insn *insn, *last;
1012 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1014 /* DRAP is needed for stack realign if longjmp is expanded to current
1015 function */
1016 if (SUPPORTS_STACK_ALIGNMENT)
1017 crtl->need_drap = true;
1019 if (setjmp_alias_set == -1)
1020 setjmp_alias_set = new_alias_set ();
1022 buf_addr = convert_memory_address (Pmode, buf_addr);
1024 buf_addr = force_reg (Pmode, buf_addr);
1026 /* We require that the user pass a second argument of 1, because
1027 that is what builtin_setjmp will return. */
1028 gcc_assert (value == const1_rtx);
1030 last = get_last_insn ();
1031 if (targetm.have_builtin_longjmp ())
1032 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1033 else
1035 fp = gen_rtx_MEM (Pmode, buf_addr);
1036 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1037 GET_MODE_SIZE (Pmode)));
1039 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1040 2 * GET_MODE_SIZE (Pmode)));
1041 set_mem_alias_set (fp, setjmp_alias_set);
1042 set_mem_alias_set (lab, setjmp_alias_set);
1043 set_mem_alias_set (stack, setjmp_alias_set);
1045 /* Pick up FP, label, and SP from the block and jump. This code is
1046 from expand_goto in stmt.cc; see there for detailed comments. */
1047 if (targetm.have_nonlocal_goto ())
1048 /* We have to pass a value to the nonlocal_goto pattern that will
1049 get copied into the static_chain pointer, but it does not matter
1050 what that value is, because builtin_setjmp does not use it. */
1051 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1052 else
1054 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1055 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1057 lab = copy_to_reg (lab);
1059 /* Restore the frame pointer and stack pointer. We must use a
1060 temporary since the setjmp buffer may be a local. */
1061 fp = copy_to_reg (fp);
1062 emit_stack_restore (SAVE_NONLOCAL, stack);
1064 /* Ensure the frame pointer move is not optimized. */
1065 emit_insn (gen_blockage ());
1066 emit_clobber (hard_frame_pointer_rtx);
1067 emit_clobber (frame_pointer_rtx);
1068 emit_move_insn (hard_frame_pointer_rtx, fp);
1070 emit_use (hard_frame_pointer_rtx);
1071 emit_use (stack_pointer_rtx);
1072 emit_indirect_jump (lab);
1076 /* Search backwards and mark the jump insn as a non-local goto.
1077 Note that this precludes the use of __builtin_longjmp to a
1078 __builtin_setjmp target in the same function. However, we've
1079 already cautioned the user that these functions are for
1080 internal exception handling use only. */
1081 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1083 gcc_assert (insn != last);
1085 if (JUMP_P (insn))
1087 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1088 break;
1090 else if (CALL_P (insn))
1091 break;
1095 static inline bool
1096 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1098 return (iter->i < iter->n);
1101 /* This function validates the types of a function call argument list
1102 against a specified list of tree_codes. If the last specifier is a 0,
1103 that represents an ellipsis, otherwise the last specifier must be a
1104 VOID_TYPE. */
1106 static bool
1107 validate_arglist (const_tree callexpr, ...)
1109 enum tree_code code;
1110 bool res = 0;
1111 va_list ap;
1112 const_call_expr_arg_iterator iter;
1113 const_tree arg;
1115 va_start (ap, callexpr);
1116 init_const_call_expr_arg_iterator (callexpr, &iter);
1118 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1119 tree fn = CALL_EXPR_FN (callexpr);
1120 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1122 for (unsigned argno = 1; ; ++argno)
1124 code = (enum tree_code) va_arg (ap, int);
1126 switch (code)
1128 case 0:
1129 /* This signifies an ellipsis; any further arguments are all ok. */
1130 res = true;
1131 goto end;
1132 case VOID_TYPE:
1133 /* This signifies an endlink: if no arguments remain, return
1134 true, otherwise return false. */
1135 res = !more_const_call_expr_args_p (&iter);
1136 goto end;
1137 case POINTER_TYPE:
1138 /* The actual argument must be nonnull when either the whole
1139 called function has been declared nonnull, or when the formal
1140 argument corresponding to the actual argument has been. */
1141 if (argmap
1142 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1144 arg = next_const_call_expr_arg (&iter);
1145 if (!validate_arg (arg, code) || integer_zerop (arg))
1146 goto end;
1147 break;
1149 /* FALLTHRU */
1150 default:
1151 /* If no parameters remain or the parameter's code does not
1152 match the specified code, return false. Otherwise continue
1153 checking any remaining arguments. */
1154 arg = next_const_call_expr_arg (&iter);
1155 if (!validate_arg (arg, code))
1156 goto end;
1157 break;
1161 /* We need gotos here since we can only have one VA_CLOSE in a
1162 function. */
1163 end: ;
1164 va_end (ap);
1166 BITMAP_FREE (argmap);
1168 return res;
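/* Typical usage, as in the expanders below:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while a trailing 0 in place
   of VOID_TYPE would allow any number of further arguments, as for an
   ellipsis.  */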
1171 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1172 and the address of the save area. */
1174 static rtx
1175 expand_builtin_nonlocal_goto (tree exp)
1177 tree t_label, t_save_area;
1178 rtx r_label, r_save_area, r_fp, r_sp;
1179 rtx_insn *insn;
1181 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1182 return NULL_RTX;
1184 t_label = CALL_EXPR_ARG (exp, 0);
1185 t_save_area = CALL_EXPR_ARG (exp, 1);
1187 r_label = expand_normal (t_label);
1188 r_label = convert_memory_address (Pmode, r_label);
1189 r_save_area = expand_normal (t_save_area);
1190 r_save_area = convert_memory_address (Pmode, r_save_area);
1191 /* Copy the address of the save location to a register just in case it was
1192 based on the frame pointer. */
1193 r_save_area = copy_to_reg (r_save_area);
1194 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1195 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1196 plus_constant (Pmode, r_save_area,
1197 GET_MODE_SIZE (Pmode)));
1199 crtl->has_nonlocal_goto = 1;
1201 /* ??? We no longer need to pass the static chain value, afaik. */
1202 if (targetm.have_nonlocal_goto ())
1203 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1204 else
1206 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1207 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1209 r_label = copy_to_reg (r_label);
1211 /* Restore the frame pointer and stack pointer. We must use a
1212 temporary since the setjmp buffer may be a local. */
1213 r_fp = copy_to_reg (r_fp);
1214 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1216 /* Ensure the frame pointer move is not optimized. */
1217 emit_insn (gen_blockage ());
1218 emit_clobber (hard_frame_pointer_rtx);
1219 emit_clobber (frame_pointer_rtx);
1220 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1222 /* USE of hard_frame_pointer_rtx added for consistency;
1223 not clear if really needed. */
1224 emit_use (hard_frame_pointer_rtx);
1225 emit_use (stack_pointer_rtx);
1227 /* If the architecture is using a GP register, we must
1228 conservatively assume that the target function makes use of it.
1229 The prologue of functions with nonlocal gotos must therefore
1230 initialize the GP register to the appropriate value, and we
1231 must then make sure that this value is live at the point
1232 of the jump. (Note that this doesn't necessarily apply
1233 to targets with a nonlocal_goto pattern; they are free
1234 to implement it in their own way. Note also that this is
1235 a no-op if the GP register is a global invariant.) */
1236 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1237 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1238 emit_use (pic_offset_table_rtx);
1240 emit_indirect_jump (r_label);
1243 /* Search backwards to the jump insn and mark it as a
1244 non-local goto. */
1245 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1247 if (JUMP_P (insn))
1249 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1250 break;
1252 else if (CALL_P (insn))
1253 break;
1256 return const0_rtx;
1259 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1260 (not all will be used on all machines) that was passed to __builtin_setjmp.
1261 It updates the stack pointer in that block to the current value. This is
1262 also called directly by the SJLJ exception handling code. */
1264 void
1265 expand_builtin_update_setjmp_buf (rtx buf_addr)
1267 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1268 buf_addr = convert_memory_address (Pmode, buf_addr);
1269 rtx stack_save
1270 = gen_rtx_MEM (sa_mode,
1271 memory_address
1272 (sa_mode,
1273 plus_constant (Pmode, buf_addr,
1274 2 * GET_MODE_SIZE (Pmode))));
1276 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1279 /* Expand a call to __builtin_prefetch. For a target that does not support
1280 data prefetch, evaluate the memory address argument in case it has side
1281 effects. */
1283 static void
1284 expand_builtin_prefetch (tree exp)
1286 tree arg0, arg1, arg2;
1287 int nargs;
1288 rtx op0, op1, op2;
1290 if (!validate_arglist (exp, POINTER_TYPE, 0))
1291 return;
1293 arg0 = CALL_EXPR_ARG (exp, 0);
1295 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1296 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1297 locality). */
1298 nargs = call_expr_nargs (exp);
1299 if (nargs > 1)
1300 arg1 = CALL_EXPR_ARG (exp, 1);
1301 else
1302 arg1 = integer_zero_node;
1303 if (nargs > 2)
1304 arg2 = CALL_EXPR_ARG (exp, 2);
1305 else
1306 arg2 = integer_three_node;
1308 /* Argument 0 is an address. */
1309 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1311 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1312 if (TREE_CODE (arg1) != INTEGER_CST)
1314 error ("second argument to %<__builtin_prefetch%> must be a constant");
1315 arg1 = integer_zero_node;
1317 op1 = expand_normal (arg1);
1318 /* Argument 1 must be either zero or one. */
1319 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1321 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1322 " using zero");
1323 op1 = const0_rtx;
1326 /* Argument 2 (locality) must be a compile-time constant int. */
1327 if (TREE_CODE (arg2) != INTEGER_CST)
1329 error ("third argument to %<__builtin_prefetch%> must be a constant");
1330 arg2 = integer_zero_node;
1332 op2 = expand_normal (arg2);
1333 /* Argument 2 must be 0, 1, 2, or 3. */
1334 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1336 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1337 op2 = const0_rtx;
1340 if (targetm.have_prefetch ())
1342 class expand_operand ops[3];
1344 create_address_operand (&ops[0], op0);
1345 create_integer_operand (&ops[1], INTVAL (op1));
1346 create_integer_operand (&ops[2], INTVAL (op2));
1347 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1348 return;
1351 /* Don't do anything with direct references to volatile memory, but
1352 generate code to handle other side effects. */
1353 if (!MEM_P (op0) && side_effects_p (op0))
1354 emit_insn (op0);
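/* For example, __builtin_prefetch (p) behaves like
   __builtin_prefetch (p, 0, 3): a read prefetch with the highest
   degree of temporal locality.  */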
1357 /* Get a MEM rtx for expression EXP which is the address of an operand
1358 to be used in a string instruction (cmpstrsi, cpymemsi, ...). LEN is
1359 the maximum length of the block of memory that might be accessed or
1360 NULL if unknown. */
1362 static rtx
1363 get_memory_rtx (tree exp, tree len)
1365 tree orig_exp = exp, base;
1366 rtx addr, mem;
1368 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1369 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1370 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1371 exp = TREE_OPERAND (exp, 0);
1373 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1374 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1376 /* Get an expression we can use to find the attributes to assign to MEM.
1377 First remove any nops. */
1378 while (CONVERT_EXPR_P (exp)
1379 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1380 exp = TREE_OPERAND (exp, 0);
1382 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1383 (as builtin stringops may alias with anything). */
1384 exp = fold_build2 (MEM_REF,
1385 build_array_type (char_type_node,
1386 build_range_type (sizetype,
1387 size_one_node, len)),
1388 exp, build_int_cst (ptr_type_node, 0));
1390 /* If the MEM_REF has no acceptable address, try to get the base object
1391 from the original address we got, and build an all-aliasing
1392 unknown-sized access to that one. */
1393 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1394 set_mem_attributes (mem, exp, 0);
1395 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1396 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1397 0))))
1399 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1400 exp = build_fold_addr_expr (base);
1401 exp = fold_build2 (MEM_REF,
1402 build_array_type (char_type_node,
1403 build_range_type (sizetype,
1404 size_zero_node,
1405 NULL)),
1406 exp, build_int_cst (ptr_type_node, 0));
1407 set_mem_attributes (mem, exp, 0);
1408 /* Since we stripped parts make sure the offset is unknown and the
1409 alignment is computed from the original address. */
1410 clear_mem_offset (mem);
1411 set_mem_align (mem, align);
1413 set_mem_alias_set (mem, 0);
1414 return mem;
1417 /* Built-in functions to perform an untyped call and return. */
1419 #define apply_args_mode \
1420 (this_target_builtins->x_apply_args_mode)
1421 #define apply_result_mode \
1422 (this_target_builtins->x_apply_result_mode)
1424 /* Return the size required for the block returned by __builtin_apply_args,
1425 and initialize apply_args_mode. */
1427 static int
1428 apply_args_size (void)
1430 static int size = -1;
1431 int align;
1432 unsigned int regno;
1434 /* The values computed by this function never change. */
1435 if (size < 0)
1437 /* The first value is the incoming arg-pointer. */
1438 size = GET_MODE_SIZE (Pmode);
1440 /* The second value is the structure value address unless this is
1441 passed as an "invisible" first argument. */
1442 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1443 size += GET_MODE_SIZE (Pmode);
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if (FUNCTION_ARG_REGNO_P (regno))
1448 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1450 if (mode != VOIDmode)
1452 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1453 if (size % align != 0)
1454 size = CEIL (size, align) * align;
1455 size += GET_MODE_SIZE (mode);
1456 apply_args_mode[regno] = mode;
1458 else
1459 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1461 else
1462 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1464 return size;
1467 /* Return the size required for the block returned by __builtin_apply,
1468 and initialize apply_result_mode. */
1470 static int
1471 apply_result_size (void)
1473 static int size = -1;
1474 int align, regno;
1476 /* The values computed by this function never change. */
1477 if (size < 0)
1479 size = 0;
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if (targetm.calls.function_value_regno_p (regno))
1484 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1486 if (mode != VOIDmode)
1488 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1489 if (size % align != 0)
1490 size = CEIL (size, align) * align;
1491 size += GET_MODE_SIZE (mode);
1492 apply_result_mode[regno] = mode;
1494 else
1495 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1497 else
1498 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1500 /* Allow targets that use untyped_call and untyped_return to override
1501 the size so that machine-specific information can be stored here. */
1502 #ifdef APPLY_RESULT_SIZE
1503 size = APPLY_RESULT_SIZE;
1504 #endif
1506 return size;
1509 /* Create a vector describing the result block RESULT. If SAVEP is true,
1510 the result block is used to save the values; otherwise it is used to
1511 restore the values. */
1513 static rtx
1514 result_vector (int savep, rtx result)
1516 int regno, size, align, nelts;
1517 fixed_size_mode mode;
1518 rtx reg, mem;
1519 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1521 size = nelts = 0;
1522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1523 if ((mode = apply_result_mode[regno]) != VOIDmode)
1525 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1526 if (size % align != 0)
1527 size = CEIL (size, align) * align;
1528 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1529 mem = adjust_address (result, mode, size);
1530 savevec[nelts++] = (savep
1531 ? gen_rtx_SET (mem, reg)
1532 : gen_rtx_SET (reg, mem));
1533 size += GET_MODE_SIZE (mode);
1535 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1538 /* Save the state required to perform an untyped call with the same
1539 arguments as were passed to the current function. */
1541 static rtx
1542 expand_builtin_apply_args_1 (void)
1544 rtx registers, tem;
1545 int size, align, regno;
1546 fixed_size_mode mode;
1547 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1549 /* Create a block where the arg-pointer, structure value address,
1550 and argument registers can be saved. */
1551 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1553 /* Walk past the arg-pointer and structure value address. */
1554 size = GET_MODE_SIZE (Pmode);
1555 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1556 size += GET_MODE_SIZE (Pmode);
1558 /* Save each register used in calling a function to the block. */
1559 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1560 if ((mode = apply_args_mode[regno]) != VOIDmode)
1562 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1563 if (size % align != 0)
1564 size = CEIL (size, align) * align;
1566 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1568 emit_move_insn (adjust_address (registers, mode, size), tem);
1569 size += GET_MODE_SIZE (mode);
1572 /* Save the arg pointer to the block. */
1573 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1574 /* We need the pointer as the caller actually passed the arguments to us, not
1575 as we might have pretended they were passed. Make sure it's a valid
1576 operand, as emit_move_insn isn't expected to handle a PLUS. */
1577 if (STACK_GROWS_DOWNWARD)
1578 tem
1579 = force_operand (plus_constant (Pmode, tem,
1580 crtl->args.pretend_args_size),
1581 NULL_RTX);
1582 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1584 size = GET_MODE_SIZE (Pmode);
1586 /* Save the structure value address unless this is passed as an
1587 "invisible" first argument. */
1588 if (struct_incoming_value)
1589 emit_move_insn (adjust_address (registers, Pmode, size),
1590 copy_to_reg (struct_incoming_value));
1592 /* Return the address of the block. */
1593 return copy_addr_to_reg (XEXP (registers, 0));
1596 /* __builtin_apply_args returns block of memory allocated on
1597 the stack into which is stored the arg pointer, structure
1598 value address, static chain, and all the registers that might
1599 possibly be used in performing a function call. The code is
1600 moved to the start of the function so the incoming values are
1601 saved. */
1603 static rtx
1604 expand_builtin_apply_args (void)
1606 /* Don't do __builtin_apply_args more than once in a function.
1607 Save the result of the first call and reuse it. */
1608 if (apply_args_value != 0)
1609 return apply_args_value;
1611 /* When this function is called, it means that registers must be
1612 saved on entry to this function. So we migrate the
1613 call to the first insn of this function. */
1614 rtx temp;
1616 start_sequence ();
1617 temp = expand_builtin_apply_args_1 ();
1618 rtx_insn *seq = get_insns ();
1619 end_sequence ();
1621 apply_args_value = temp;
1623 /* Put the insns after the NOTE that starts the function.
1624 If this is inside a start_sequence, make the outer-level insn
1625 chain current, so the code is placed at the start of the
1626 function. If internal_arg_pointer is a non-virtual pseudo,
1627 it needs to be placed after the function that initializes
1628 that pseudo. */
1629 push_topmost_sequence ();
1630 if (REG_P (crtl->args.internal_arg_pointer)
1631 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1632 emit_insn_before (seq, parm_birth_insn);
1633 else
1634 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1635 pop_topmost_sequence ();
1636 return temp;
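/* A rough source-level sketch of how these builtins fit together in a
   forwarding wrapper; TARGET_FN and the 64-byte argument size are
   purely illustrative:

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }  */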
1640 /* Perform an untyped call and save the state required to perform an
1641 untyped return of whatever value was returned by the given function. */
1643 static rtx
1644 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1646 int size, align, regno;
1647 fixed_size_mode mode;
1648 rtx incoming_args, result, reg, dest, src;
1649 rtx_call_insn *call_insn;
1650 rtx old_stack_level = 0;
1651 rtx call_fusage = 0;
1652 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1654 arguments = convert_memory_address (Pmode, arguments);
1656 /* Create a block where the return registers can be saved. */
1657 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1659 /* Fetch the arg pointer from the ARGUMENTS block. */
1660 incoming_args = gen_reg_rtx (Pmode);
1661 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1662 if (!STACK_GROWS_DOWNWARD)
1663 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1664 incoming_args, 0, OPTAB_LIB_WIDEN);
1666 /* Push a new argument block and copy the arguments. Do not allow
1667 the (potential) memcpy call below to interfere with our stack
1668 manipulations. */
1669 do_pending_stack_adjust ();
1670 NO_DEFER_POP;
1672 /* Save the stack with nonlocal if available. */
1673 if (targetm.have_save_stack_nonlocal ())
1674 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1675 else
1676 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1678 /* Allocate a block of memory onto the stack and copy the memory
1679 arguments to the outgoing arguments address. We can pass TRUE
1680 as the 4th argument because we just saved the stack pointer
1681 and will restore it right after the call. */
1682 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1684 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1685 may have already set current_function_calls_alloca to true.
1686 current_function_calls_alloca won't be set if argsize is zero,
1687 so we have to guarantee need_drap is true here. */
1688 if (SUPPORTS_STACK_ALIGNMENT)
1689 crtl->need_drap = true;
1691 dest = virtual_outgoing_args_rtx;
1692 if (!STACK_GROWS_DOWNWARD)
1694 if (CONST_INT_P (argsize))
1695 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1696 else
1697 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1699 dest = gen_rtx_MEM (BLKmode, dest);
1700 set_mem_align (dest, PARM_BOUNDARY);
1701 src = gen_rtx_MEM (BLKmode, incoming_args);
1702 set_mem_align (src, PARM_BOUNDARY);
1703 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1705 /* Refer to the argument block. */
1706 apply_args_size ();
1707 arguments = gen_rtx_MEM (BLKmode, arguments);
1708 set_mem_align (arguments, PARM_BOUNDARY);
1710 /* Walk past the arg-pointer and structure value address. */
1711 size = GET_MODE_SIZE (Pmode);
1712 if (struct_value)
1713 size += GET_MODE_SIZE (Pmode);
1715 /* Restore each of the registers previously saved. Make USE insns
1716 for each of these registers for use in making the call. */
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if ((mode = apply_args_mode[regno]) != VOIDmode)
1720 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1721 if (size % align != 0)
1722 size = CEIL (size, align) * align;
1723 reg = gen_rtx_REG (mode, regno);
1724 emit_move_insn (reg, adjust_address (arguments, mode, size));
1725 use_reg (&call_fusage, reg);
1726 size += GET_MODE_SIZE (mode);
1729 /* Restore the structure value address unless this is passed as an
1730 "invisible" first argument. */
1731 size = GET_MODE_SIZE (Pmode);
1732 if (struct_value)
1734 rtx value = gen_reg_rtx (Pmode);
1735 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1736 emit_move_insn (struct_value, value);
1737 if (REG_P (struct_value))
1738 use_reg (&call_fusage, struct_value);
1741 /* All arguments and registers used for the call are set up by now! */
1742 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1744 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1745 and we don't want to load it into a register as an optimization,
1746 because prepare_call_address already did it if it should be done. */
1747 if (GET_CODE (function) != SYMBOL_REF)
1748 function = memory_address (FUNCTION_MODE, function);
1750 /* Generate the actual call instruction and save the return value. */
1751 if (targetm.have_untyped_call ())
1753 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1754 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1755 result_vector (1, result));
1756 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1757 if (CALL_P (insn))
1758 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1759 emit_insn (seq);
1761 else if (targetm.have_call_value ())
1763 rtx valreg = 0;
1765 /* Locate the unique return register. It is not possible to
1766 express a call that sets more than one return register using
1767 call_value; use untyped_call for that. In fact, untyped_call
1768 only needs to save the return registers in the given block. */
1769 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1770 if ((mode = apply_result_mode[regno]) != VOIDmode)
1772 gcc_assert (!valreg); /* have_untyped_call required. */
1774 valreg = gen_rtx_REG (mode, regno);
1777 emit_insn (targetm.gen_call_value (valreg,
1778 gen_rtx_MEM (FUNCTION_MODE, function),
1779 const0_rtx, NULL_RTX, const0_rtx));
1781 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1783 else
1784 gcc_unreachable ();
1786 /* Find the CALL insn we just emitted, and attach the register usage
1787 information. */
1788 call_insn = last_call_insn ();
1789 add_function_usage_to (call_insn, call_fusage);
1791 /* Restore the stack. */
1792 if (targetm.have_save_stack_nonlocal ())
1793 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1794 else
1795 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1796 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1798 OK_DEFER_POP;
1800 /* Return the address of the result block. */
1801 result = copy_addr_to_reg (XEXP (result, 0));
1802 return convert_memory_address (ptr_mode, result);
1805 /* Perform an untyped return. */
1807 static void
1808 expand_builtin_return (rtx result)
1810 int size, align, regno;
1811 fixed_size_mode mode;
1812 rtx reg;
1813 rtx_insn *call_fusage = 0;
1815 result = convert_memory_address (Pmode, result);
1817 apply_result_size ();
1818 result = gen_rtx_MEM (BLKmode, result);
1820 if (targetm.have_untyped_return ())
1822 rtx vector = result_vector (0, result);
1823 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1824 emit_barrier ();
1825 return;
1828 /* Restore the return value and note that each value is used. */
1829 size = 0;
1830 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1831 if ((mode = apply_result_mode[regno]) != VOIDmode)
1833 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1834 if (size % align != 0)
1835 size = CEIL (size, align) * align;
1836 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1837 emit_move_insn (reg, adjust_address (result, mode, size));
1839 push_to_sequence (call_fusage);
1840 emit_use (reg);
1841 call_fusage = get_insns ();
1842 end_sequence ();
1843 size += GET_MODE_SIZE (mode);
1846 /* Put the USE insns before the return. */
1847 emit_insn (call_fusage);
1849 /* Return whatever values were restored by jumping directly to the end
1850 of the function. */
1851 expand_naked_return ();
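/* Illustrative usage sketch (not part of GCC itself): the three builtins
   expanded above are typically combined to forward a call whose arguments
   are not known statically, e.g.

     extern int target_fn (int);

     void *forward_to_target (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (res);
     }

   Here target_fn and the 64-byte bound on the argument block size are
   assumptions made purely for the example.  */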
1854 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1856 static enum type_class
1857 type_to_class (tree type)
1859 switch (TREE_CODE (type))
1861 case VOID_TYPE: return void_type_class;
1862 case INTEGER_TYPE: return integer_type_class;
1863 case ENUMERAL_TYPE: return enumeral_type_class;
1864 case BOOLEAN_TYPE: return boolean_type_class;
1865 case POINTER_TYPE: return pointer_type_class;
1866 case REFERENCE_TYPE: return reference_type_class;
1867 case OFFSET_TYPE: return offset_type_class;
1868 case REAL_TYPE: return real_type_class;
1869 case COMPLEX_TYPE: return complex_type_class;
1870 case FUNCTION_TYPE: return function_type_class;
1871 case METHOD_TYPE: return method_type_class;
1872 case RECORD_TYPE: return record_type_class;
1873 case UNION_TYPE:
1874 case QUAL_UNION_TYPE: return union_type_class;
1875 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1876 ? string_type_class : array_type_class);
1877 case LANG_TYPE: return lang_type_class;
1878 case OPAQUE_TYPE: return opaque_type_class;
1879 case BITINT_TYPE: return bitint_type_class;
1880 default: return no_type_class;
1884 /* Expand a call EXP to __builtin_classify_type. */
1886 static rtx
1887 expand_builtin_classify_type (tree exp)
1889 if (call_expr_nargs (exp))
1890 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1891 return GEN_INT (no_type_class);
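/* Illustrative note (an assumption about typical use, not GCC source): user
   code sees the builtin as an integer classification, e.g.

     int k = __builtin_classify_type (some_expr);

   and compares K against the enum type_class values listed above; glibc's
   generic math macros have historically used it this way.  */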
1894 /* This helper macro, meant to be used in mathfn_built_in below, determines
1895 which among a set of builtin math functions is appropriate for a given type
1896 mode. The `F' (float) and `L' (long double) variants are automatically generated
1897 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1898 types, there are additional types that are considered with 'F32', 'F64',
1899 'F128', etc. suffixes. */
1900 #define CASE_MATHFN(MATHFN) \
1901 CASE_CFN_##MATHFN: \
1902 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1903 fcodel = BUILT_IN_##MATHFN##L ; break;
1904 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1905 types. */
1906 #define CASE_MATHFN_FLOATN(MATHFN) \
1907 CASE_CFN_##MATHFN: \
1908 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1909 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1910 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1911 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1912 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1913 break;
1914 /* Similar to above, but appends _R after any F/L suffix. */
1915 #define CASE_MATHFN_REENT(MATHFN) \
1916 case CFN_BUILT_IN_##MATHFN##_R: \
1917 case CFN_BUILT_IN_##MATHFN##F_R: \
1918 case CFN_BUILT_IN_##MATHFN##L_R: \
1919 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1920 fcodel = BUILT_IN_##MATHFN##L_R ; break;
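/* For illustration (approximate expansion, not normative): CASE_MATHFN (EXP10)
   expands to roughly

     case CFN_BUILT_IN_EXP10: case CFN_EXP10:
       fcode = BUILT_IN_EXP10; fcodef = BUILT_IN_EXP10F;
       fcodel = BUILT_IN_EXP10L; break;

   with CASE_CFN_EXP10 from case-cfn-macros.h providing the case labels for
   both the built-in and any matching internal function.  */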
1922 /* Return a function equivalent to FN but operating on floating-point
1923 values of type TYPE, or END_BUILTINS if no such function exists.
1924 This is purely an operation on function codes; it does not guarantee
1925 that the target actually has an implementation of the function. */
1927 static built_in_function
1928 mathfn_built_in_2 (tree type, combined_fn fn)
1930 tree mtype;
1931 built_in_function fcode, fcodef, fcodel;
1932 built_in_function fcodef16 = END_BUILTINS;
1933 built_in_function fcodef32 = END_BUILTINS;
1934 built_in_function fcodef64 = END_BUILTINS;
1935 built_in_function fcodef128 = END_BUILTINS;
1936 built_in_function fcodef32x = END_BUILTINS;
1937 built_in_function fcodef64x = END_BUILTINS;
1938 built_in_function fcodef128x = END_BUILTINS;
1940 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1941 break the uses below. */
1942 #undef HUGE_VAL
1943 #undef NAN
1945 switch (fn)
1947 #define SEQ_OF_CASE_MATHFN \
1948 CASE_MATHFN_FLOATN (ACOS) \
1949 CASE_MATHFN_FLOATN (ACOSH) \
1950 CASE_MATHFN_FLOATN (ASIN) \
1951 CASE_MATHFN_FLOATN (ASINH) \
1952 CASE_MATHFN_FLOATN (ATAN) \
1953 CASE_MATHFN_FLOATN (ATAN2) \
1954 CASE_MATHFN_FLOATN (ATANH) \
1955 CASE_MATHFN_FLOATN (CBRT) \
1956 CASE_MATHFN_FLOATN (CEIL) \
1957 CASE_MATHFN (CEXPI) \
1958 CASE_MATHFN_FLOATN (COPYSIGN) \
1959 CASE_MATHFN_FLOATN (COS) \
1960 CASE_MATHFN_FLOATN (COSH) \
1961 CASE_MATHFN (DREM) \
1962 CASE_MATHFN_FLOATN (ERF) \
1963 CASE_MATHFN_FLOATN (ERFC) \
1964 CASE_MATHFN_FLOATN (EXP) \
1965 CASE_MATHFN (EXP10) \
1966 CASE_MATHFN_FLOATN (EXP2) \
1967 CASE_MATHFN_FLOATN (EXPM1) \
1968 CASE_MATHFN_FLOATN (FABS) \
1969 CASE_MATHFN_FLOATN (FDIM) \
1970 CASE_MATHFN_FLOATN (FLOOR) \
1971 CASE_MATHFN_FLOATN (FMA) \
1972 CASE_MATHFN_FLOATN (FMAX) \
1973 CASE_MATHFN_FLOATN (FMIN) \
1974 CASE_MATHFN_FLOATN (FMOD) \
1975 CASE_MATHFN_FLOATN (FREXP) \
1976 CASE_MATHFN (GAMMA) \
1977 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1978 CASE_MATHFN_FLOATN (HUGE_VAL) \
1979 CASE_MATHFN_FLOATN (HYPOT) \
1980 CASE_MATHFN_FLOATN (ILOGB) \
1981 CASE_MATHFN (ICEIL) \
1982 CASE_MATHFN (IFLOOR) \
1983 CASE_MATHFN_FLOATN (INF) \
1984 CASE_MATHFN (IRINT) \
1985 CASE_MATHFN (IROUND) \
1986 CASE_MATHFN (ISINF) \
1987 CASE_MATHFN (J0) \
1988 CASE_MATHFN (J1) \
1989 CASE_MATHFN (JN) \
1990 CASE_MATHFN (LCEIL) \
1991 CASE_MATHFN_FLOATN (LDEXP) \
1992 CASE_MATHFN (LFLOOR) \
1993 CASE_MATHFN_FLOATN (LGAMMA) \
1994 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1995 CASE_MATHFN (LLCEIL) \
1996 CASE_MATHFN (LLFLOOR) \
1997 CASE_MATHFN_FLOATN (LLRINT) \
1998 CASE_MATHFN_FLOATN (LLROUND) \
1999 CASE_MATHFN_FLOATN (LOG) \
2000 CASE_MATHFN_FLOATN (LOG10) \
2001 CASE_MATHFN_FLOATN (LOG1P) \
2002 CASE_MATHFN_FLOATN (LOG2) \
2003 CASE_MATHFN_FLOATN (LOGB) \
2004 CASE_MATHFN_FLOATN (LRINT) \
2005 CASE_MATHFN_FLOATN (LROUND) \
2006 CASE_MATHFN_FLOATN (MODF) \
2007 CASE_MATHFN_FLOATN (NAN) \
2008 CASE_MATHFN_FLOATN (NANS) \
2009 CASE_MATHFN_FLOATN (NEARBYINT) \
2010 CASE_MATHFN_FLOATN (NEXTAFTER) \
2011 CASE_MATHFN (NEXTTOWARD) \
2012 CASE_MATHFN_FLOATN (POW) \
2013 CASE_MATHFN (POWI) \
2014 CASE_MATHFN (POW10) \
2015 CASE_MATHFN_FLOATN (REMAINDER) \
2016 CASE_MATHFN_FLOATN (REMQUO) \
2017 CASE_MATHFN_FLOATN (RINT) \
2018 CASE_MATHFN_FLOATN (ROUND) \
2019 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2020 CASE_MATHFN (SCALB) \
2021 CASE_MATHFN_FLOATN (SCALBLN) \
2022 CASE_MATHFN_FLOATN (SCALBN) \
2023 CASE_MATHFN (SIGNBIT) \
2024 CASE_MATHFN (SIGNIFICAND) \
2025 CASE_MATHFN_FLOATN (SIN) \
2026 CASE_MATHFN (SINCOS) \
2027 CASE_MATHFN_FLOATN (SINH) \
2028 CASE_MATHFN_FLOATN (SQRT) \
2029 CASE_MATHFN_FLOATN (TAN) \
2030 CASE_MATHFN_FLOATN (TANH) \
2031 CASE_MATHFN_FLOATN (TGAMMA) \
2032 CASE_MATHFN_FLOATN (TRUNC) \
2033 CASE_MATHFN (Y0) \
2034 CASE_MATHFN (Y1) \
2035 CASE_MATHFN (YN)
2037 SEQ_OF_CASE_MATHFN
2039 default:
2040 return END_BUILTINS;
2043 mtype = TYPE_MAIN_VARIANT (type);
2044 if (mtype == double_type_node)
2045 return fcode;
2046 else if (mtype == float_type_node)
2047 return fcodef;
2048 else if (mtype == long_double_type_node)
2049 return fcodel;
2050 else if (mtype == float16_type_node)
2051 return fcodef16;
2052 else if (mtype == float32_type_node)
2053 return fcodef32;
2054 else if (mtype == float64_type_node)
2055 return fcodef64;
2056 else if (mtype == float128_type_node)
2057 return fcodef128;
2058 else if (mtype == float32x_type_node)
2059 return fcodef32x;
2060 else if (mtype == float64x_type_node)
2061 return fcodef64x;
2062 else if (mtype == float128x_type_node)
2063 return fcodef128x;
2064 else
2065 return END_BUILTINS;
2068 #undef CASE_MATHFN
2069 #undef CASE_MATHFN_FLOATN
2070 #undef CASE_MATHFN_REENT
2072 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2073 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2074 otherwise use the explicit declaration. If we can't do the conversion,
2075 return null. */
2077 static tree
2078 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2080 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2081 if (fcode2 == END_BUILTINS)
2082 return NULL_TREE;
2084 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2085 return NULL_TREE;
2087 return builtin_decl_explicit (fcode2);
2090 /* Like mathfn_built_in_1, but always use the implicit array. */
2092 tree
2093 mathfn_built_in (tree type, combined_fn fn)
2095 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2098 /* Like mathfn_built_in_1, but always use the explicit array. */
2100 tree
2101 mathfn_built_in_explicit (tree type, combined_fn fn)
2103 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2106 /* Like mathfn_built_in_1, but take a built_in_function and
2107 always use the implicit array. */
2109 tree
2110 mathfn_built_in (tree type, enum built_in_function fn)
2112 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
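/* Usage sketch (illustrative): a caller that wants the float variant of sqrt
   can write

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which returns the declaration of BUILT_IN_SQRTF when the implicit
   declaration is available, and NULL_TREE otherwise.  */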
2115 /* Return the type associated with a built in function, i.e., the one
2116 to be passed to mathfn_built_in to get the type-specific
2117 function. */
2119 tree
2120 mathfn_built_in_type (combined_fn fn)
2122 #define CASE_MATHFN(MATHFN) \
2123 case CFN_BUILT_IN_##MATHFN: \
2124 return double_type_node; \
2125 case CFN_BUILT_IN_##MATHFN##F: \
2126 return float_type_node; \
2127 case CFN_BUILT_IN_##MATHFN##L: \
2128 return long_double_type_node;
2130 #define CASE_MATHFN_FLOATN(MATHFN) \
2131 CASE_MATHFN(MATHFN) \
2132 case CFN_BUILT_IN_##MATHFN##F16: \
2133 return float16_type_node; \
2134 case CFN_BUILT_IN_##MATHFN##F32: \
2135 return float32_type_node; \
2136 case CFN_BUILT_IN_##MATHFN##F64: \
2137 return float64_type_node; \
2138 case CFN_BUILT_IN_##MATHFN##F128: \
2139 return float128_type_node; \
2140 case CFN_BUILT_IN_##MATHFN##F32X: \
2141 return float32x_type_node; \
2142 case CFN_BUILT_IN_##MATHFN##F64X: \
2143 return float64x_type_node; \
2144 case CFN_BUILT_IN_##MATHFN##F128X: \
2145 return float128x_type_node;
2147 /* Similar to above, but appends _R after any F/L suffix. */
2148 #define CASE_MATHFN_REENT(MATHFN) \
2149 case CFN_BUILT_IN_##MATHFN##_R: \
2150 return double_type_node; \
2151 case CFN_BUILT_IN_##MATHFN##F_R: \
2152 return float_type_node; \
2153 case CFN_BUILT_IN_##MATHFN##L_R: \
2154 return long_double_type_node;
2156 switch (fn)
2158 SEQ_OF_CASE_MATHFN
2160 default:
2161 return NULL_TREE;
2164 #undef CASE_MATHFN
2165 #undef CASE_MATHFN_FLOATN
2166 #undef CASE_MATHFN_REENT
2167 #undef SEQ_OF_CASE_MATHFN
2170 /* Check whether there is an internal function associated with function FN
2171 and return type RETURN_TYPE. Return the function if so, otherwise return
2172 IFN_LAST.
2174 Note that this function only tests whether the function is defined in
2175 internals.def, not whether it is actually available on the target. */
2177 static internal_fn
2178 associated_internal_fn (built_in_function fn, tree return_type)
2180 switch (fn)
2182 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2183 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2184 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2185 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2186 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2187 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2188 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2189 #include "internal-fn.def"
2191 CASE_FLT_FN (BUILT_IN_POW10):
2192 return IFN_EXP10;
2194 CASE_FLT_FN (BUILT_IN_DREM):
2195 return IFN_REMAINDER;
2197 CASE_FLT_FN (BUILT_IN_SCALBN):
2198 CASE_FLT_FN (BUILT_IN_SCALBLN):
2199 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2200 return IFN_LDEXP;
2201 return IFN_LAST;
2203 default:
2204 return IFN_LAST;
2208 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2209 return its code, otherwise return IFN_LAST. Note that this function
2210 only tests whether the function is defined in internals.def, not whether
2211 it is actually available on the target. */
2213 internal_fn
2214 associated_internal_fn (tree fndecl)
2216 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2217 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2218 TREE_TYPE (TREE_TYPE (fndecl)));
2221 /* Check whether there is an internal function associated with function CFN
2222 and return type RETURN_TYPE. Return the function if so, otherwise return
2223 IFN_LAST.
2225 Note that this function only tests whether the function is defined in
2226 internals.def, not whether it is actually available on the target. */
2228 internal_fn
2229 associated_internal_fn (combined_fn cfn, tree return_type)
2231 if (internal_fn_p (cfn))
2232 return as_internal_fn (cfn);
2233 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2236 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2237 on the current target by a call to an internal function, return the
2238 code of that internal function, otherwise return IFN_LAST. The caller
2239 is responsible for ensuring that any side-effects of the built-in
2240 call are dealt with correctly. E.g. if CALL sets errno, the caller
2241 must decide that the errno result isn't needed or make it available
2242 in some other way. */
2244 internal_fn
2245 replacement_internal_fn (gcall *call)
2247 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2249 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2250 if (ifn != IFN_LAST)
2252 tree_pair types = direct_internal_fn_types (ifn, call);
2253 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2254 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2255 return ifn;
2258 return IFN_LAST;
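/* Usage sketch (illustrative): a GIMPLE pass that sees a call such as
   x = __builtin_sqrtf (y) and does not need the errno side-effect can ask

     internal_fn ifn = replacement_internal_fn (call);

   and, if IFN is not IFN_LAST, rebuild the statement as an internal call
   (e.g. via gimple_build_call_internal), which then expands directly to the
   target instruction.  */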
2261 /* Expand a call to the builtin trinary math functions (fma).
2262 Return NULL_RTX if a normal call should be emitted rather than expanding the
2263 function in-line. EXP is the expression that is a call to the builtin
2264 function; if convenient, the result should be placed in TARGET.
2265 SUBTARGET may be used as the target for computing one of EXP's
2266 operands. */
2268 static rtx
2269 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2271 optab builtin_optab;
2272 rtx op0, op1, op2, result;
2273 rtx_insn *insns;
2274 tree fndecl = get_callee_fndecl (exp);
2275 tree arg0, arg1, arg2;
2276 machine_mode mode;
2278 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2281 arg0 = CALL_EXPR_ARG (exp, 0);
2282 arg1 = CALL_EXPR_ARG (exp, 1);
2283 arg2 = CALL_EXPR_ARG (exp, 2);
2285 switch (DECL_FUNCTION_CODE (fndecl))
2287 CASE_FLT_FN (BUILT_IN_FMA):
2288 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2289 builtin_optab = fma_optab; break;
2290 default:
2291 gcc_unreachable ();
2294 /* Make a suitable register to place result in. */
2295 mode = TYPE_MODE (TREE_TYPE (exp));
2297 /* Before working hard, check whether the instruction is available. */
2298 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2299 return NULL_RTX;
2301 result = gen_reg_rtx (mode);
2303 /* Always stabilize the argument list. */
2304 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2305 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2306 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2308 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2309 op1 = expand_normal (arg1);
2310 op2 = expand_normal (arg2);
2312 start_sequence ();
2314 /* Compute into RESULT.
2315 Set RESULT to wherever the result comes back. */
2316 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2317 result, 0);
2319 /* If we were unable to expand via the builtin, stop the sequence
2320 (without outputting the insns) and call the library function
2321 with the stabilized argument list. */
2322 if (result == 0)
2324 end_sequence ();
2325 return expand_call (exp, target, target == const0_rtx);
2328 /* Output the entire sequence. */
2329 insns = get_insns ();
2330 end_sequence ();
2331 emit_insn (insns);
2333 return result;
2336 /* Expand a call to the builtin sin and cos math functions.
2337 Return NULL_RTX if a normal call should be emitted rather than expanding the
2338 function in-line. EXP is the expression that is a call to the builtin
2339 function; if convenient, the result should be placed in TARGET.
2340 SUBTARGET may be used as the target for computing one of EXP's
2341 operands. */
2343 static rtx
2344 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2346 optab builtin_optab;
2347 rtx op0;
2348 rtx_insn *insns;
2349 tree fndecl = get_callee_fndecl (exp);
2350 machine_mode mode;
2351 tree arg;
2353 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2354 return NULL_RTX;
2356 arg = CALL_EXPR_ARG (exp, 0);
2358 switch (DECL_FUNCTION_CODE (fndecl))
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 CASE_FLT_FN (BUILT_IN_COS):
2362 builtin_optab = sincos_optab; break;
2363 default:
2364 gcc_unreachable ();
2367 /* Make a suitable register to place result in. */
2368 mode = TYPE_MODE (TREE_TYPE (exp));
2370 /* Check if the sincos insn is available, otherwise fall back
2371 to the sin or cos insn. */
2372 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2373 switch (DECL_FUNCTION_CODE (fndecl))
2375 CASE_FLT_FN (BUILT_IN_SIN):
2376 builtin_optab = sin_optab; break;
2377 CASE_FLT_FN (BUILT_IN_COS):
2378 builtin_optab = cos_optab; break;
2379 default:
2380 gcc_unreachable ();
2383 /* Before working hard, check whether the instruction is available. */
2384 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2386 rtx result = gen_reg_rtx (mode);
2388 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2389 need to expand the argument again. This way, we will not perform
2390 side-effects more than once. */
2391 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2393 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2395 start_sequence ();
2397 /* Compute into RESULT.
2398 Set RESULT to wherever the result comes back. */
2399 if (builtin_optab == sincos_optab)
2401 int ok;
2403 switch (DECL_FUNCTION_CODE (fndecl))
2405 CASE_FLT_FN (BUILT_IN_SIN):
2406 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2407 break;
2408 CASE_FLT_FN (BUILT_IN_COS):
2409 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2410 break;
2411 default:
2412 gcc_unreachable ();
2414 gcc_assert (ok);
2416 else
2417 result = expand_unop (mode, builtin_optab, op0, result, 0);
2419 if (result != 0)
2421 /* Output the entire sequence. */
2422 insns = get_insns ();
2423 end_sequence ();
2424 emit_insn (insns);
2425 return result;
2428 /* If we were unable to expand via the builtin, stop the sequence
2429 (without outputting the insns) and call the library function
2430 with the stabilized argument list. */
2431 end_sequence ();
2434 return expand_call (exp, target, target == const0_rtx);
2437 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2438 return an RTL instruction code that implements the functionality.
2439 If that isn't possible or available return CODE_FOR_nothing. */
2441 static enum insn_code
2442 interclass_mathfn_icode (tree arg, tree fndecl)
2444 bool errno_set = false;
2445 optab builtin_optab = unknown_optab;
2446 machine_mode mode;
2448 switch (DECL_FUNCTION_CODE (fndecl))
2450 CASE_FLT_FN (BUILT_IN_ILOGB):
2451 errno_set = true; builtin_optab = ilogb_optab; break;
2452 CASE_FLT_FN (BUILT_IN_ISINF):
2453 builtin_optab = isinf_optab; break;
2454 case BUILT_IN_ISNORMAL:
2455 case BUILT_IN_ISFINITE:
2456 CASE_FLT_FN (BUILT_IN_FINITE):
2457 case BUILT_IN_FINITED32:
2458 case BUILT_IN_FINITED64:
2459 case BUILT_IN_FINITED128:
2460 case BUILT_IN_ISINFD32:
2461 case BUILT_IN_ISINFD64:
2462 case BUILT_IN_ISINFD128:
2463 /* These builtins have no optabs (yet). */
2464 break;
2465 default:
2466 gcc_unreachable ();
2469 /* There's no easy way to detect the case we need to set EDOM. */
2470 if (flag_errno_math && errno_set)
2471 return CODE_FOR_nothing;
2473 /* Optab mode depends on the mode of the input argument. */
2474 mode = TYPE_MODE (TREE_TYPE (arg));
2476 if (builtin_optab)
2477 return optab_handler (builtin_optab, mode);
2478 return CODE_FOR_nothing;
2481 /* Expand a call to one of the builtin math functions that operate on
2482 floating point argument and output an integer result (ilogb, isinf,
2483 isnan, etc).
2484 Return 0 if a normal call should be emitted rather than expanding the
2485 function in-line. EXP is the expression that is a call to the builtin
2486 function; if convenient, the result should be placed in TARGET. */
2488 static rtx
2489 expand_builtin_interclass_mathfn (tree exp, rtx target)
2491 enum insn_code icode = CODE_FOR_nothing;
2492 rtx op0;
2493 tree fndecl = get_callee_fndecl (exp);
2494 machine_mode mode;
2495 tree arg;
2497 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2498 return NULL_RTX;
2500 arg = CALL_EXPR_ARG (exp, 0);
2501 icode = interclass_mathfn_icode (arg, fndecl);
2502 mode = TYPE_MODE (TREE_TYPE (arg));
2504 if (icode != CODE_FOR_nothing)
2506 class expand_operand ops[1];
2507 rtx_insn *last = get_last_insn ();
2508 tree orig_arg = arg;
2510 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2511 need to expand the argument again. This way, we will not perform
2512 side-effects more than once. */
2513 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2515 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2517 if (mode != GET_MODE (op0))
2518 op0 = convert_to_mode (mode, op0, 0);
2520 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2521 if (maybe_legitimize_operands (icode, 0, 1, ops)
2522 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2523 return ops[0].value;
2525 delete_insns_since (last);
2526 CALL_EXPR_ARG (exp, 0) = orig_arg;
2529 return NULL_RTX;
2532 /* Expand a call to the builtin sincos math function.
2533 Return NULL_RTX if a normal call should be emitted rather than expanding the
2534 function in-line. EXP is the expression that is a call to the builtin
2535 function. */
2537 static rtx
2538 expand_builtin_sincos (tree exp)
2540 rtx op0, op1, op2, target1, target2;
2541 machine_mode mode;
2542 tree arg, sinp, cosp;
2543 int result;
2544 location_t loc = EXPR_LOCATION (exp);
2545 tree alias_type, alias_off;
2547 if (!validate_arglist (exp, REAL_TYPE,
2548 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2549 return NULL_RTX;
2551 arg = CALL_EXPR_ARG (exp, 0);
2552 sinp = CALL_EXPR_ARG (exp, 1);
2553 cosp = CALL_EXPR_ARG (exp, 2);
2555 /* Make a suitable register to place result in. */
2556 mode = TYPE_MODE (TREE_TYPE (arg));
2558 /* Check if sincos insn is available, otherwise emit the call. */
2559 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2560 return NULL_RTX;
2562 target1 = gen_reg_rtx (mode);
2563 target2 = gen_reg_rtx (mode);
2565 op0 = expand_normal (arg);
2566 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2567 alias_off = build_int_cst (alias_type, 0);
2568 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2569 sinp, alias_off));
2570 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2571 cosp, alias_off));
2573 /* Compute into target1 and target2.
2574 Set TARGET to wherever the result comes back. */
2575 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2576 gcc_assert (result);
2578 /* Move target1 and target2 to the memory locations indicated
2579 by op1 and op2. */
2580 emit_move_insn (op1, target1);
2581 emit_move_insn (op2, target2);
2583 return const0_rtx;
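/* Illustrative source-level example (an assumption about typical input):
   a call such as

     sincos (x, &s, &c);

   reaches this expander and, when the target provides a sincos optab,
   becomes a single insn computing both values, followed by stores through
   the SINP and COSP pointers.  */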
2586 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2587 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2588 static rtx
2589 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2591 if (!validate_arglist (exp, VOID_TYPE))
2592 return NULL_RTX;
2594 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2595 if (icode == CODE_FOR_nothing)
2596 return NULL_RTX;
2598 if (target == 0
2599 || GET_MODE (target) != target_mode
2600 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2601 target = gen_reg_rtx (target_mode);
2603 rtx pat = GEN_FCN (icode) (target);
2604 if (!pat)
2605 return NULL_RTX;
2606 emit_insn (pat);
2608 return target;
2611 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2612 fenv.h), returning the result and setting it in TARGET. Otherwise return
2613 NULL_RTX on failure. */
2614 static rtx
2615 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2616 machine_mode target_mode, optab op_optab)
2618 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2619 return NULL_RTX;
2620 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2622 insn_code icode = direct_optab_handler (op_optab, SImode);
2623 if (icode == CODE_FOR_nothing)
2624 return NULL_RTX;
2626 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2627 return NULL_RTX;
2629 if (target == 0
2630 || GET_MODE (target) != target_mode
2631 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2632 target = gen_reg_rtx (target_mode);
2634 rtx pat = GEN_FCN (icode) (target, op0);
2635 if (!pat)
2636 return NULL_RTX;
2637 emit_insn (pat);
2639 return target;
2642 /* Expand a call to the internal cexpi builtin to the sincos math function.
2643 EXP is the expression that is a call to the builtin function; if convenient,
2644 the result should be placed in TARGET. */
2646 static rtx
2647 expand_builtin_cexpi (tree exp, rtx target)
2649 tree fndecl = get_callee_fndecl (exp);
2650 tree arg, type;
2651 machine_mode mode;
2652 rtx op0, op1, op2;
2653 location_t loc = EXPR_LOCATION (exp);
2655 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2656 return NULL_RTX;
2658 arg = CALL_EXPR_ARG (exp, 0);
2659 type = TREE_TYPE (arg);
2660 mode = TYPE_MODE (TREE_TYPE (arg));
2662 /* Try expanding via a sincos optab, fall back to emitting a libcall
2663 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2664 is only generated from sincos or cexp, or when either of them is available. */
2665 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2667 op1 = gen_reg_rtx (mode);
2668 op2 = gen_reg_rtx (mode);
2670 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2672 /* Compute into op1 and op2. */
2673 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2675 else if (targetm.libc_has_function (function_sincos, type))
2677 tree call, fn = NULL_TREE;
2678 tree top1, top2;
2679 rtx op1a, op2a;
2681 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2682 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2683 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2684 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2685 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2686 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2687 else
2688 gcc_unreachable ();
2690 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2691 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2692 op1a = copy_addr_to_reg (XEXP (op1, 0));
2693 op2a = copy_addr_to_reg (XEXP (op2, 0));
2694 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2695 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2697 /* Make sure not to fold the sincos call again. */
2698 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2699 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2700 call, 3, arg, top1, top2));
2702 else
2704 tree call, fn = NULL_TREE, narg;
2705 tree ctype = build_complex_type (type);
2707 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2708 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2709 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2710 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2711 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2712 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2713 else
2714 gcc_unreachable ();
2716 /* If we don't have a decl for cexp, create one. This is the
2717 friendliest fallback if the user calls __builtin_cexpi
2718 without full target C99 function support. */
2719 if (fn == NULL_TREE)
2721 tree fntype;
2722 const char *name = NULL;
2724 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2725 name = "cexpf";
2726 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2727 name = "cexp";
2728 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2729 name = "cexpl";
2731 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2732 fn = build_fn_decl (name, fntype);
2735 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2736 build_real (type, dconst0), arg);
2738 /* Make sure not to fold the cexp call again. */
2739 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2740 return expand_expr (build_call_nary (ctype, call, 1, narg),
2741 target, VOIDmode, EXPAND_NORMAL);
2744 /* Now build the proper return type. */
2745 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2746 make_tree (TREE_TYPE (arg), op2),
2747 make_tree (TREE_TYPE (arg), op1)),
2748 target, VOIDmode, EXPAND_NORMAL);
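/* Worked note (illustrative): __builtin_cexpi (x) denotes cexp (I * x),
   i.e. cos (x) + I * sin (x); it is typically introduced by earlier folding
   of sincos/cexp, which is why the fallbacks above may lower it to either
   sincos or cexp.  */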
2751 /* Conveniently construct a function call expression. FNDECL names the
2752 function to be called, N is the number of arguments, and the "..."
2753 parameters are the argument expressions. Unlike build_call_expr
2754 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2756 static tree
2757 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2759 va_list ap;
2760 tree fntype = TREE_TYPE (fndecl);
2761 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2763 va_start (ap, n);
2764 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2765 va_end (ap);
2766 SET_EXPR_LOCATION (fn, loc);
2767 return fn;
2770 /* Expand the __builtin_issignaling builtin. This needs to handle
2771 all floating point formats that do support NaNs (for those that
2772 don't it just sets target to 0). */
2774 static rtx
2775 expand_builtin_issignaling (tree exp, rtx target)
2777 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2778 return NULL_RTX;
2780 tree arg = CALL_EXPR_ARG (exp, 0);
2781 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2782 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2784 /* Expand the argument yielding a RTX expression. */
2785 rtx temp = expand_normal (arg);
2787 /* If the mode doesn't support NaNs, always return 0.
2788 Don't use !HONOR_SNANS (fmode) here, so that there is some possibility
2789 of __builtin_issignaling working without -fsignaling-nans, especially
2790 when -fno-signaling-nans is the default.
2791 On the other hand, using MODE_HAS_NANS (fmode) is unnecessary: with
2792 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2793 fold to 0 or a non-NaN/Inf classification. */
2794 if (!HONOR_NANS (fmode))
2796 emit_move_insn (target, const0_rtx);
2797 return target;
2800 /* Check if the back end provides an insn that handles issignaling for the
2801 argument's mode. */
2802 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2803 if (icode != CODE_FOR_nothing)
2805 rtx_insn *last = get_last_insn ();
2806 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2807 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2808 return this_target;
2809 delete_insns_since (last);
2812 if (DECIMAL_FLOAT_MODE_P (fmode))
2814 scalar_int_mode imode;
2815 rtx hi;
2816 switch (fmt->ieee_bits)
2818 case 32:
2819 case 64:
2820 imode = int_mode_for_mode (fmode).require ();
2821 temp = gen_lowpart (imode, temp);
2822 break;
2823 case 128:
2824 imode = int_mode_for_size (64, 1).require ();
2825 hi = NULL_RTX;
2826 /* For decimal128, TImode support isn't always there and even when
2827 it is, working on the DImode high part is usually better. */
2828 if (!MEM_P (temp))
2830 if (rtx t = simplify_gen_subreg (imode, temp, fmode,
2831 subreg_highpart_offset (imode,
2832 fmode)))
2833 hi = t;
2834 else
2836 scalar_int_mode imode2;
2837 if (int_mode_for_mode (fmode).exists (&imode2))
2839 rtx temp2 = gen_lowpart (imode2, temp);
2840 poly_uint64 off = subreg_highpart_offset (imode, imode2);
2841 if (rtx t = simplify_gen_subreg (imode, temp2,
2842 imode2, off))
2843 hi = t;
2846 if (!hi)
2848 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2849 emit_move_insn (mem, temp);
2850 temp = mem;
2853 if (!hi)
2855 poly_int64 offset
2856 = subreg_highpart_offset (imode, GET_MODE (temp));
2857 hi = adjust_address (temp, imode, offset);
2859 temp = hi;
2860 break;
2861 default:
2862 gcc_unreachable ();
2864 /* In all of decimal{32,64,128}, the MSB is the sign bit and sNaNs
2865 have the 6 bits below it all set. */
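/* Worked example (illustrative): for decimal32 this makes VAL equal to
   0x3f << 25 == 0x7e000000, so the test below reduces to
   (x & 0x7e000000) == 0x7e000000, which holds only for signaling NaNs.  */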
2866 rtx val
2867 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2868 temp = expand_binop (imode, and_optab, temp, val,
2869 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2870 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2871 return temp;
2874 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2875 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2876 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2877 gcc_assert (MODE_COMPOSITE_P (fmode)
2878 || (fmt->pnan == fmt->p
2879 && fmt->signbit_ro == fmt->signbit_rw));
2881 switch (fmt->p)
2883 case 106: /* IBM double double */
2884 /* For IBM double double, recurse on the most significant double. */
2885 gcc_assert (MODE_COMPOSITE_P (fmode));
2886 temp = convert_modes (DFmode, fmode, temp, 0);
2887 fmode = DFmode;
2888 fmt = REAL_MODE_FORMAT (DFmode);
2889 /* FALLTHRU */
2890 case 8: /* bfloat */
2891 case 11: /* IEEE half */
2892 case 24: /* IEEE single */
2893 case 53: /* IEEE double or Intel extended with rounding to double */
2894 if (fmt->p == 53 && fmt->signbit_ro == 79)
2895 goto extended;
2897 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2898 temp = gen_lowpart (imode, temp);
2899 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2900 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2901 if (fmt->qnan_msb_set)
2903 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2904 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2905 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2906 ((temp ^ bit) & mask) > val. */
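/* Worked example (illustrative): for IEEE single precision this becomes
   ((x ^ 0x00400000) & 0x7fffffff) > 0x7fc00000, which is true exactly when
   the exponent field is all ones, the quiet bit is clear and the mantissa
   is nonzero.  */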
2907 temp = expand_binop (imode, xor_optab, temp, bit,
2908 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2909 temp = expand_binop (imode, and_optab, temp, mask,
2910 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2911 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2912 1, 1);
2914 else
2916 /* For MIPS/PA IEEE single/double, expand to:
2917 (temp & val) == val. */
2918 temp = expand_binop (imode, and_optab, temp, val,
2919 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2920 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2921 1, 1);
2924 break;
2925 case 113: /* IEEE quad */
2927 rtx hi = NULL_RTX, lo = NULL_RTX;
2928 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2929 /* For IEEE quad, TImode support isn't always there and even when
2930 it is, working on DImode parts is usually better. */
2931 if (!MEM_P (temp))
2933 hi = simplify_gen_subreg (imode, temp, fmode,
2934 subreg_highpart_offset (imode, fmode));
2935 lo = simplify_gen_subreg (imode, temp, fmode,
2936 subreg_lowpart_offset (imode, fmode));
2937 if (!hi || !lo)
2939 scalar_int_mode imode2;
2940 if (int_mode_for_mode (fmode).exists (&imode2))
2942 rtx temp2 = gen_lowpart (imode2, temp);
2943 hi = simplify_gen_subreg (imode, temp2, imode2,
2944 subreg_highpart_offset (imode,
2945 imode2));
2946 lo = simplify_gen_subreg (imode, temp2, imode2,
2947 subreg_lowpart_offset (imode,
2948 imode2));
2951 if (!hi || !lo)
2953 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2954 emit_move_insn (mem, temp);
2955 temp = mem;
2958 if (!hi || !lo)
2960 poly_int64 offset
2961 = subreg_highpart_offset (imode, GET_MODE (temp));
2962 hi = adjust_address (temp, imode, offset);
2963 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2964 lo = adjust_address (temp, imode, offset);
2966 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2967 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2968 if (fmt->qnan_msb_set)
2970 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2971 - 64)));
2972 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2973 /* For non-MIPS/PA IEEE quad, expand to:
2974 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2975 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2976 lo = expand_binop (imode, ior_optab, lo, nlo,
2977 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2978 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2979 temp = expand_binop (imode, xor_optab, hi, bit,
2980 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2981 temp = expand_binop (imode, ior_optab, temp, lo,
2982 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2983 temp = expand_binop (imode, and_optab, temp, mask,
2984 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2985 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2986 1, 1);
2988 else
2990 /* For MIPS/PA IEEE quad, expand to:
2991 (hi & val) == val. */
2992 temp = expand_binop (imode, and_optab, hi, val,
2993 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2994 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2995 1, 1);
2998 break;
2999 case 64: /* Intel or Motorola extended */
3000 extended:
3002 rtx ex, hi, lo;
3003 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3004 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3005 if (!MEM_P (temp))
3007 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3008 emit_move_insn (mem, temp);
3009 temp = mem;
3011 if (fmt->signbit_ro == 95)
3013 /* Motorola, always big endian, with 16-bit gap in between
3014 16-bit sign+exponent and 64-bit mantissa. */
3015 ex = adjust_address (temp, iemode, 0);
3016 hi = adjust_address (temp, imode, 4);
3017 lo = adjust_address (temp, imode, 8);
3019 else if (!WORDS_BIG_ENDIAN)
3021 /* Intel little endian, 64-bit mantissa followed by 16-bit
3022 sign+exponent and then either 16 or 48 bits of gap. */
3023 ex = adjust_address (temp, iemode, 8);
3024 hi = adjust_address (temp, imode, 4);
3025 lo = adjust_address (temp, imode, 0);
3027 else
3029 /* Big endian Itanium. */
3030 ex = adjust_address (temp, iemode, 0);
3031 hi = adjust_address (temp, imode, 2);
3032 lo = adjust_address (temp, imode, 6);
3034 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3035 gcc_assert (fmt->qnan_msb_set);
3036 rtx mask = GEN_INT (0x7fff);
3037 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3038 /* For Intel/Motorola extended format, expand to:
3039 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3040 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3041 lo = expand_binop (imode, ior_optab, lo, nlo,
3042 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3043 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3044 temp = expand_binop (imode, xor_optab, hi, bit,
3045 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3046 temp = expand_binop (imode, ior_optab, temp, lo,
3047 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3048 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3049 ex = expand_binop (iemode, and_optab, ex, mask,
3050 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3051 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3052 ex, mask, iemode, 1, 1);
3053 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3054 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3056 break;
3057 default:
3058 gcc_unreachable ();
3061 return temp;
3064 /* Expand a call to one of the builtin rounding functions gcc defines
3065 as an extension (lfloor and lceil). As these are gcc extensions we
3066 do not need to worry about setting errno to EDOM.
3067 If expanding via optab fails, lower expression to (int)(floor(x)).
3068 EXP is the expression that is a call to the builtin function;
3069 if convenient, the result should be placed in TARGET. */
3071 static rtx
3072 expand_builtin_int_roundingfn (tree exp, rtx target)
3074 convert_optab builtin_optab;
3075 rtx op0, tmp;
3076 rtx_insn *insns;
3077 tree fndecl = get_callee_fndecl (exp);
3078 enum built_in_function fallback_fn;
3079 tree fallback_fndecl;
3080 machine_mode mode;
3081 tree arg;
3083 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3084 return NULL_RTX;
3086 arg = CALL_EXPR_ARG (exp, 0);
3088 switch (DECL_FUNCTION_CODE (fndecl))
3090 CASE_FLT_FN (BUILT_IN_ICEIL):
3091 CASE_FLT_FN (BUILT_IN_LCEIL):
3092 CASE_FLT_FN (BUILT_IN_LLCEIL):
3093 builtin_optab = lceil_optab;
3094 fallback_fn = BUILT_IN_CEIL;
3095 break;
3097 CASE_FLT_FN (BUILT_IN_IFLOOR):
3098 CASE_FLT_FN (BUILT_IN_LFLOOR):
3099 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3100 builtin_optab = lfloor_optab;
3101 fallback_fn = BUILT_IN_FLOOR;
3102 break;
3104 default:
3105 gcc_unreachable ();
3108 /* Make a suitable register to place result in. */
3109 mode = TYPE_MODE (TREE_TYPE (exp));
3111 target = gen_reg_rtx (mode);
3113 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3114 need to expand the argument again. This way, we will not perform
3115 side-effects more than once. */
3116 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3118 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3120 start_sequence ();
3122 /* Compute into TARGET. */
3123 if (expand_sfix_optab (target, op0, builtin_optab))
3125 /* Output the entire sequence. */
3126 insns = get_insns ();
3127 end_sequence ();
3128 emit_insn (insns);
3129 return target;
3132 /* If we were unable to expand via the builtin, stop the sequence
3133 (without outputting the insns). */
3134 end_sequence ();
3136 /* Fall back to floating point rounding optab. */
3137 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3139 /* For non-C99 targets we may end up without a fallback fndecl here
3140 if the user called __builtin_lfloor directly. In this case emit
3141 a call to the floor/ceil variants nevertheless. This should result
3142 in the best user experience for targets lacking full C99 support. */
3143 if (fallback_fndecl == NULL_TREE)
3145 tree fntype;
3146 const char *name = NULL;
3148 switch (DECL_FUNCTION_CODE (fndecl))
3150 case BUILT_IN_ICEIL:
3151 case BUILT_IN_LCEIL:
3152 case BUILT_IN_LLCEIL:
3153 name = "ceil";
3154 break;
3155 case BUILT_IN_ICEILF:
3156 case BUILT_IN_LCEILF:
3157 case BUILT_IN_LLCEILF:
3158 name = "ceilf";
3159 break;
3160 case BUILT_IN_ICEILL:
3161 case BUILT_IN_LCEILL:
3162 case BUILT_IN_LLCEILL:
3163 name = "ceill";
3164 break;
3165 case BUILT_IN_IFLOOR:
3166 case BUILT_IN_LFLOOR:
3167 case BUILT_IN_LLFLOOR:
3168 name = "floor";
3169 break;
3170 case BUILT_IN_IFLOORF:
3171 case BUILT_IN_LFLOORF:
3172 case BUILT_IN_LLFLOORF:
3173 name = "floorf";
3174 break;
3175 case BUILT_IN_IFLOORL:
3176 case BUILT_IN_LFLOORL:
3177 case BUILT_IN_LLFLOORL:
3178 name = "floorl";
3179 break;
3180 default:
3181 gcc_unreachable ();
3184 fntype = build_function_type_list (TREE_TYPE (arg),
3185 TREE_TYPE (arg), NULL_TREE);
3186 fallback_fndecl = build_fn_decl (name, fntype);
3189 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3191 tmp = expand_normal (exp);
3192 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3194 /* Truncate the result of floating point optab to integer
3195 via expand_fix (). */
3196 target = gen_reg_rtx (mode);
3197 expand_fix (target, tmp, 0);
3199 return target;
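/* Illustrative lowering sketch (assuming a target without the optab):

     long l = __builtin_lfloorf (x);

   is handled by the fallback path above roughly as

     float t = floorf (x);
     long l = (long) t;

   i.e. a call to floorf followed by an ordinary float-to-integer conversion
   emitted by expand_fix.  */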
3202 /* Expand a call to one of the builtin math functions doing integer
3203 conversion (lrint).
3204 Return 0 if a normal call should be emitted rather than expanding the
3205 function in-line. EXP is the expression that is a call to the builtin
3206 function; if convenient, the result should be placed in TARGET. */
3208 static rtx
3209 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3211 convert_optab builtin_optab;
3212 rtx op0;
3213 rtx_insn *insns;
3214 tree fndecl = get_callee_fndecl (exp);
3215 tree arg;
3216 machine_mode mode;
3217 enum built_in_function fallback_fn = BUILT_IN_NONE;
3219 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3222 arg = CALL_EXPR_ARG (exp, 0);
3224 switch (DECL_FUNCTION_CODE (fndecl))
3226 CASE_FLT_FN (BUILT_IN_IRINT):
3227 fallback_fn = BUILT_IN_LRINT;
3228 gcc_fallthrough ();
3229 CASE_FLT_FN (BUILT_IN_LRINT):
3230 CASE_FLT_FN (BUILT_IN_LLRINT):
3231 builtin_optab = lrint_optab;
3232 break;
3234 CASE_FLT_FN (BUILT_IN_IROUND):
3235 fallback_fn = BUILT_IN_LROUND;
3236 gcc_fallthrough ();
3237 CASE_FLT_FN (BUILT_IN_LROUND):
3238 CASE_FLT_FN (BUILT_IN_LLROUND):
3239 builtin_optab = lround_optab;
3240 break;
3242 default:
3243 gcc_unreachable ();
3246 /* There's no easy way to detect the case we need to set EDOM. */
3247 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3248 return NULL_RTX;
3250 /* Make a suitable register to place result in. */
3251 mode = TYPE_MODE (TREE_TYPE (exp));
3253 /* There's no easy way to detect the case we need to set EDOM. */
3254 if (!flag_errno_math)
3256 rtx result = gen_reg_rtx (mode);
3258 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3259 need to expand the argument again. This way, we will not perform
3260 side-effects more than once. */
3261 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3263 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3265 start_sequence ();
3267 if (expand_sfix_optab (result, op0, builtin_optab))
3269 /* Output the entire sequence. */
3270 insns = get_insns ();
3271 end_sequence ();
3272 emit_insn (insns);
3273 return result;
3276 /* If we were unable to expand via the builtin, stop the sequence
3277 (without outputting the insns) and call the library function
3278 with the stabilized argument list. */
3279 end_sequence ();
3282 if (fallback_fn != BUILT_IN_NONE)
3284 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3285 targets, (int) round (x) should never be transformed into
3286 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3287 a call to lround in the hope that the target provides at least some
3288 C99 functions. This should result in the best user experience for
3289 targets lacking full C99 support.
3290 As scalar float conversions with the same mode are useless in GIMPLE,
3291 we can end up e.g. with a _Float32 argument passed to a float builtin,
3292 so try to get the type from the builtin prototype first. */
3293 tree fallback_fndecl = NULL_TREE;
3294 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3295 fallback_fndecl
3296 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3297 as_combined_fn (fallback_fn), 0);
3298 if (fallback_fndecl == NULL_TREE)
3299 fallback_fndecl
3300 = mathfn_built_in_1 (TREE_TYPE (arg),
3301 as_combined_fn (fallback_fn), 0);
3302 if (fallback_fndecl)
3304 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3305 fallback_fndecl, 1, arg);
3307 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3308 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3309 return convert_to_mode (mode, target, 0);
3313 return expand_call (exp, target, target == const0_rtx);
3316 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3317 a normal call should be emitted rather than expanding the function
3318 in-line. EXP is the expression that is a call to the builtin
3319 function; if convenient, the result should be placed in TARGET. */
3321 static rtx
3322 expand_builtin_powi (tree exp, rtx target)
3324 tree arg0, arg1;
3325 rtx op0, op1;
3326 machine_mode mode;
3327 machine_mode mode2;
3329 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3332 arg0 = CALL_EXPR_ARG (exp, 0);
3333 arg1 = CALL_EXPR_ARG (exp, 1);
3334 mode = TYPE_MODE (TREE_TYPE (exp));
3336 /* Emit a libcall to libgcc. */
3338 /* Mode of the 2nd argument must match that of an int. */
3339 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3341 if (target == NULL_RTX)
3342 target = gen_reg_rtx (mode);
3344 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3345 if (GET_MODE (op0) != mode)
3346 op0 = convert_to_mode (mode, op0, 0);
3347 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3348 if (GET_MODE (op1) != mode2)
3349 op1 = convert_to_mode (mode2, op1, 0);
3351 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3352 target, LCT_CONST, mode,
3353 op0, mode, op1, mode2);
3355 return target;
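/* Illustrative note: the libcall emitted above resolves to libgcc's __powi*
   helpers, e.g. __builtin_powi (x, n) with a double X becomes a call to
   __powidf2 (x, n).  */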
3358 /* Expand expression EXP which is a call to the strlen builtin. Return
3359 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3360 try to get the result in TARGET, if convenient. */
3362 static rtx
3363 expand_builtin_strlen (tree exp, rtx target,
3364 machine_mode target_mode)
3366 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3367 return NULL_RTX;
3369 tree src = CALL_EXPR_ARG (exp, 0);
3371 /* If the length can be computed at compile-time, return it. */
3372 if (tree len = c_strlen (src, 0))
3373 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3375 /* If the length can be computed at compile-time and is a constant
3376 integer, but there are side-effects in src, evaluate
3377 src for side-effects, then return len.
3378 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3379 can be optimized into: i++; x = 3; */
3380 tree len = c_strlen (src, 1);
3381 if (len && TREE_CODE (len) == INTEGER_CST)
3383 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3384 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3387 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3389 /* If SRC is not a pointer type, don't do this operation inline. */
3390 if (align == 0)
3391 return NULL_RTX;
3393 /* Bail out if we can't compute strlen in the right mode. */
3394 machine_mode insn_mode;
3395 enum insn_code icode = CODE_FOR_nothing;
3396 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3398 icode = optab_handler (strlen_optab, insn_mode);
3399 if (icode != CODE_FOR_nothing)
3400 break;
3402 if (insn_mode == VOIDmode)
3403 return NULL_RTX;
3405 /* Make a place to hold the source address. We will not expand
3406 the actual source until we are sure that the expansion will
3407 not fail -- there are trees that cannot be expanded twice. */
3408 rtx src_reg = gen_reg_rtx (Pmode);
3410 /* Mark the beginning of the strlen sequence so we can emit the
3411 source operand later. */
3412 rtx_insn *before_strlen = get_last_insn ();
3414 class expand_operand ops[4];
3415 create_output_operand (&ops[0], target, insn_mode);
3416 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3417 create_integer_operand (&ops[2], 0);
3418 create_integer_operand (&ops[3], align);
3419 if (!maybe_expand_insn (icode, 4, ops))
3420 return NULL_RTX;
3422 /* Check to see if the argument was declared attribute nonstring
3423 and if so, issue a warning since at this point it's not known
3424 to be nul-terminated. */
3425 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3427 /* Now that we are assured of success, expand the source. */
3428 start_sequence ();
3429 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3430 if (pat != src_reg)
3432 #ifdef POINTERS_EXTEND_UNSIGNED
3433 if (GET_MODE (pat) != Pmode)
3434 pat = convert_to_mode (Pmode, pat,
3435 POINTERS_EXTEND_UNSIGNED);
3436 #endif
3437 emit_move_insn (src_reg, pat);
3439 pat = get_insns ();
3440 end_sequence ();
3442 if (before_strlen)
3443 emit_insn_after (pat, before_strlen);
3444 else
3445 emit_insn_before (pat, get_insns ());
3447 /* Return the value in the proper mode for this function. */
3448 if (GET_MODE (ops[0].value) == target_mode)
3449 target = ops[0].value;
3450 else if (target != 0)
3451 convert_move (target, ops[0].value, 0);
3452 else
3453 target = convert_to_mode (target_mode, ops[0].value, 0);
3455 return target;
3458 /* Expand a call EXP to the strnlen built-in, returning the result
3459 in TARGET if convenient. Return NULL_RTX on failure. */
3461 static rtx
3462 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3464 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3465 return NULL_RTX;
3467 tree src = CALL_EXPR_ARG (exp, 0);
3468 tree bound = CALL_EXPR_ARG (exp, 1);
3470 if (!bound)
3471 return NULL_RTX;
3473 location_t loc = UNKNOWN_LOCATION;
3474 if (EXPR_HAS_LOCATION (exp))
3475 loc = EXPR_LOCATION (exp);
3477 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3478 so these conversions aren't necessary. */
3479 c_strlen_data lendata = { };
3480 tree len = c_strlen (src, 0, &lendata, 1);
3481 if (len)
3482 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3484 if (TREE_CODE (bound) == INTEGER_CST)
3486 if (!len)
3487 return NULL_RTX;
3489 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3490 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3493 if (TREE_CODE (bound) != SSA_NAME)
3494 return NULL_RTX;
3496 wide_int min, max;
3497 value_range r;
3498 get_global_range_query ()->range_of_expr (r, bound);
3499 if (r.varying_p () || r.undefined_p ())
3500 return NULL_RTX;
3501 min = r.lower_bound ();
3502 max = r.upper_bound ();
3504 if (!len || TREE_CODE (len) != INTEGER_CST)
3506 bool exact;
3507 lendata.decl = unterminated_array (src, &len, &exact);
3508 if (!lendata.decl)
3509 return NULL_RTX;
3512 if (lendata.decl)
3513 return NULL_RTX;
3515 if (wi::gtu_p (min, wi::to_wide (len)))
3516 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3518 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3519 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
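/* A minimal illustration of the constant-bound case handled above:

     n = strnlen ("hello world", 5);

   has a known string length of 11 and a constant bound of 5, so the
   result is folded to MIN (11, 5) == 5 with no library call.  */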
3522 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3523 bytes from the bytes at DATA + OFFSET and return them reinterpreted as
3524 a target constant. */
3526 static rtx
3527 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3528 fixed_size_mode mode)
3530 /* The REPresentation pointed to by DATA need not be a nul-terminated
3531 string but the caller guarantees it's large enough for MODE. */
3532 const char *rep = (const char *) data;
3534 /* The by-pieces infrastructure does not try to pick a vector mode
3535 for memcpy expansion. */
3536 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3537 /*nul_terminated=*/false);
3540 /* LEN specifies the length of the block for the memcpy/memset operation.
3541 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3542 In some cases we can make a very likely guess at the maximum size, which
3543 we then store in PROBABLE_MAX_SIZE. */
3545 static void
3546 determine_block_size (tree len, rtx len_rtx,
3547 unsigned HOST_WIDE_INT *min_size,
3548 unsigned HOST_WIDE_INT *max_size,
3549 unsigned HOST_WIDE_INT *probable_max_size)
3551 if (CONST_INT_P (len_rtx))
3553 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3554 return;
3556 else
3558 wide_int min, max;
3559 enum value_range_kind range_type = VR_UNDEFINED;
3561 /* Determine bounds from the type. */
3562 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3563 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3564 else
3565 *min_size = 0;
3566 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3567 *probable_max_size = *max_size
3568 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3569 else
3570 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3572 if (TREE_CODE (len) == SSA_NAME)
3574 value_range r;
3575 tree tmin, tmax;
3576 get_global_range_query ()->range_of_expr (r, len);
3577 range_type = get_legacy_range (r, tmin, tmax);
3578 if (range_type != VR_UNDEFINED)
3580 min = wi::to_wide (tmin);
3581 max = wi::to_wide (tmax);
3584 if (range_type == VR_RANGE)
3586 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3587 *min_size = min.to_uhwi ();
3588 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3589 *probable_max_size = *max_size = max.to_uhwi ();
3591 else if (range_type == VR_ANTI_RANGE)
3593 /* Code like
3595 int n;
3596 if (n < 100)
3597 memcpy (a, b, n)
3599 produces an anti-range allowing negative values of N. We can
3600 still use this information and guess that N is not negative.
3602 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3603 *probable_max_size = min.to_uhwi () - 1;
3606 gcc_checking_assert (*max_size <=
3607 (unsigned HOST_WIDE_INT)
3608 GET_MODE_MASK (GET_MODE (len_rtx)));
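/* An illustrative example of the VR_RANGE case above: for code like

     if (n >= 4 && n <= 64)
       memcpy (a, b, n);

   the range of N at the call (when the range machinery has recorded it)
   gives MIN_SIZE == 4 and MAX_SIZE == 64, which lets the block-move
   expander pick a better strategy than the type-derived bounds alone.  */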
3611 /* Expand a call EXP to the memcpy builtin.
3612 Return NULL_RTX if we failed; the caller should then emit a normal call.
3613 Otherwise try to get the result in TARGET, if convenient (and in
3614 mode MODE if that's convenient). */
3616 static rtx
3617 expand_builtin_memcpy (tree exp, rtx target)
3619 if (!validate_arglist (exp,
3620 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3621 return NULL_RTX;
3623 tree dest = CALL_EXPR_ARG (exp, 0);
3624 tree src = CALL_EXPR_ARG (exp, 1);
3625 tree len = CALL_EXPR_ARG (exp, 2);
3627 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3628 /*retmode=*/ RETURN_BEGIN, false);
3631 /* Expand a call EXP to the memmove built-in. Return NULL_RTX if we failed;
3632 the caller should then emit a normal call. */
3634 static rtx
3635 expand_builtin_memmove (tree exp, rtx target)
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 tree dest = CALL_EXPR_ARG (exp, 0);
3642 tree src = CALL_EXPR_ARG (exp, 1);
3643 tree len = CALL_EXPR_ARG (exp, 2);
3645 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3646 /*retmode=*/ RETURN_BEGIN, true);
3649 /* Expand a call EXP to the mempcpy builtin.
3650 Return NULL_RTX if we failed; the caller should then emit a normal call.
3651 Otherwise try to get the result in TARGET, if convenient (and in
3652 mode MODE if that's convenient). */
3654 static rtx
3655 expand_builtin_mempcpy (tree exp, rtx target)
3657 if (!validate_arglist (exp,
3658 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3659 return NULL_RTX;
3661 tree dest = CALL_EXPR_ARG (exp, 0);
3662 tree src = CALL_EXPR_ARG (exp, 1);
3663 tree len = CALL_EXPR_ARG (exp, 2);
3665 /* Policy does not generally allow using compute_objsize (which
3666 is used internally by check_memop_size) to change code generation
3667 or drive optimization decisions.
3669 In this instance it is safe because the code we generate has
3670 the same semantics regardless of the return value of
3671 check_memop_sizes. Exactly the same amount of data is copied
3672 and the return value is exactly the same in both cases.
3674 Furthermore, check_memop_size always uses mode 0 for the call to
3675 compute_objsize, so the imprecise nature of compute_objsize is
3676 avoided. */
3678 /* Avoid expanding mempcpy into memcpy when the call is determined
3679 to overflow the buffer. This also prevents the same overflow
3680 from being diagnosed again when expanding memcpy. */
3682 return expand_builtin_mempcpy_args (dest, src, len,
3683 target, exp, /*retmode=*/ RETURN_END);
3686 /* Helper function to do the actual work for expansion of the memory copy
3687 family of functions (memcpy, mempcpy, stpcpy). The expansion should copy
3688 LEN bytes of memory from SRC to DEST and assign the result to TARGET if
3689 convenient. The return value is based on the RETMODE argument. */
3691 static rtx
3692 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3693 rtx target, tree exp, memop_ret retmode,
3694 bool might_overlap)
3696 unsigned int src_align = get_pointer_alignment (src);
3697 unsigned int dest_align = get_pointer_alignment (dest);
3698 rtx dest_mem, src_mem, dest_addr, len_rtx;
3699 HOST_WIDE_INT expected_size = -1;
3700 unsigned int expected_align = 0;
3701 unsigned HOST_WIDE_INT min_size;
3702 unsigned HOST_WIDE_INT max_size;
3703 unsigned HOST_WIDE_INT probable_max_size;
3705 bool is_move_done;
3707 /* If DEST is not a pointer type, call the normal function. */
3708 if (dest_align == 0)
3709 return NULL_RTX;
3711 /* If SRC is not a pointer type, don't do this
3712 operation in-line. */
3713 if (src_align == 0)
3714 return NULL_RTX;
3716 if (currently_expanding_gimple_stmt)
3717 stringop_block_profile (currently_expanding_gimple_stmt,
3718 &expected_align, &expected_size);
3720 if (expected_align < dest_align)
3721 expected_align = dest_align;
3722 dest_mem = get_memory_rtx (dest, len);
3723 set_mem_align (dest_mem, dest_align);
3724 len_rtx = expand_normal (len);
3725 determine_block_size (len, len_rtx, &min_size, &max_size,
3726 &probable_max_size);
3728 /* Try to get the byte representation of the constant SRC points to,
3729 with its byte size in NBYTES. */
3730 unsigned HOST_WIDE_INT nbytes;
3731 const char *rep = getbyterep (src, &nbytes);
3733 /* If the function's constant bound LEN_RTX is less than or equal
3734 to the byte size of the representation of the constant argument,
3735 and if block move would be done by pieces, we can avoid loading
3736 the bytes from memory and only store the computed constant.
3737 This works in the overlap (memmove) case as well because
3738 store_by_pieces just generates a series of stores of constants
3739 from the representation returned by getbyterep(). */
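/* For instance (illustrative only), on a little-endian target a call like

     memcpy (buf, "abcd", 4);

   can be expanded as a single 4-byte constant store of 0x64636261
   rather than a load from the string literal followed by a store.  */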
3740 if (rep
3741 && CONST_INT_P (len_rtx)
3742 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3743 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3744 CONST_CAST (char *, rep),
3745 dest_align, false))
3747 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3748 builtin_memcpy_read_str,
3749 CONST_CAST (char *, rep),
3750 dest_align, false, retmode);
3751 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3752 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3753 return dest_mem;
3756 src_mem = get_memory_rtx (src, len);
3757 set_mem_align (src_mem, src_align);
3759 /* Copy word part most expediently. */
3760 enum block_op_methods method = BLOCK_OP_NORMAL;
3761 if (CALL_EXPR_TAILCALL (exp)
3762 && (retmode == RETURN_BEGIN || target == const0_rtx))
3763 method = BLOCK_OP_TAILCALL;
3764 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3765 && retmode == RETURN_END
3766 && !might_overlap
3767 && target != const0_rtx);
3768 if (use_mempcpy_call)
3769 method = BLOCK_OP_NO_LIBCALL_RET;
3770 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3771 expected_align, expected_size,
3772 min_size, max_size, probable_max_size,
3773 use_mempcpy_call, &is_move_done,
3774 might_overlap);
3776 /* Bail out when a mempcpy call would be expanded as a libcall and
3777 when we have a target that provides a fast implementation of the
3778 mempcpy routine. */
3779 if (!is_move_done)
3780 return NULL_RTX;
3782 if (dest_addr == pc_rtx)
3783 return NULL_RTX;
3785 if (dest_addr == 0)
3787 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3788 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3791 if (retmode != RETURN_BEGIN && target != const0_rtx)
3793 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3794 /* stpcpy returns a pointer to the last byte written. */
3795 if (retmode == RETURN_END_MINUS_ONE)
3796 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3799 return dest_addr;
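/* Illustrative sketch of the return-mode adjustment above, for a copy of
   N bytes starting at D:

     RETURN_BEGIN          returns D           (memcpy convention)
     RETURN_END            returns D + N       (mempcpy convention)
     RETURN_END_MINUS_ONE  returns D + N - 1   (stpcpy convention,
                                                the last byte written)  */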
3802 static rtx
3803 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3804 rtx target, tree orig_exp, memop_ret retmode)
3806 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3807 retmode, false);
3810 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3811 we failed; the caller should then emit a normal call. Otherwise try to
3812 get the result in TARGET, if convenient.
3813 Return value is based on RETMODE argument. */
3815 static rtx
3816 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3818 class expand_operand ops[3];
3819 rtx dest_mem;
3820 rtx src_mem;
3822 if (!targetm.have_movstr ())
3823 return NULL_RTX;
3825 dest_mem = get_memory_rtx (dest, NULL);
3826 src_mem = get_memory_rtx (src, NULL);
3827 if (retmode == RETURN_BEGIN)
3829 target = force_reg (Pmode, XEXP (dest_mem, 0));
3830 dest_mem = replace_equiv_address (dest_mem, target);
3833 create_output_operand (&ops[0],
3834 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3835 create_fixed_operand (&ops[1], dest_mem);
3836 create_fixed_operand (&ops[2], src_mem);
3837 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3838 return NULL_RTX;
3840 if (retmode != RETURN_BEGIN && target != const0_rtx)
3842 target = ops[0].value;
3843 /* movstr is supposed to set end to the address of the NUL
3844 terminator. If the caller requested a mempcpy-like return value,
3845 adjust it. */
3846 if (retmode == RETURN_END)
3848 rtx tem = plus_constant (GET_MODE (target),
3849 gen_lowpart (GET_MODE (target), target), 1);
3850 emit_move_insn (target, force_operand (tem, NULL_RTX));
3853 return target;
3856 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3857 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3858 try to get the result in TARGET, if convenient (and in mode MODE if that's
3859 convenient). */
3861 static rtx
3862 expand_builtin_strcpy (tree exp, rtx target)
3864 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3865 return NULL_RTX;
3867 tree dest = CALL_EXPR_ARG (exp, 0);
3868 tree src = CALL_EXPR_ARG (exp, 1);
3870 return expand_builtin_strcpy_args (exp, dest, src, target);
3873 /* Helper function to do the actual work for expand_builtin_strcpy. The
3874 arguments to the builtin_strcpy call DEST and SRC are broken out
3875 so that this can also be called without constructing an actual CALL_EXPR.
3876 The other arguments and return value are the same as for
3877 expand_builtin_strcpy. */
3879 static rtx
3880 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3882 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3885 /* Expand a call EXP to the stpcpy builtin.
3886 Return NULL_RTX if we failed; the caller should then emit a normal call.
3887 Otherwise try to get the result in TARGET, if convenient (and in
3888 mode MODE if that's convenient). */
3890 static rtx
3891 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3893 tree dst, src;
3894 location_t loc = EXPR_LOCATION (exp);
3896 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3897 return NULL_RTX;
3899 dst = CALL_EXPR_ARG (exp, 0);
3900 src = CALL_EXPR_ARG (exp, 1);
3902 /* If return value is ignored, transform stpcpy into strcpy. */
3903 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3905 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3906 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3907 return expand_expr (result, target, mode, EXPAND_NORMAL);
3909 else
3911 tree len, lenp1;
3912 rtx ret;
3914 /* Ensure we get an actual string whose length can be evaluated at
3915 compile-time, not an expression containing a string. This is
3916 because the latter will potentially produce pessimized code
3917 when used to produce the return value. */
3918 c_strlen_data lendata = { };
3919 if (!c_getstr (src)
3920 || !(len = c_strlen (src, 0, &lendata, 1)))
3921 return expand_movstr (dst, src, target,
3922 /*retmode=*/ RETURN_END_MINUS_ONE);
3924 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3925 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3926 target, exp,
3927 /*retmode=*/ RETURN_END_MINUS_ONE);
3929 if (ret)
3930 return ret;
3932 if (TREE_CODE (len) == INTEGER_CST)
3934 rtx len_rtx = expand_normal (len);
3936 if (CONST_INT_P (len_rtx))
3938 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3940 if (ret)
3942 if (! target)
3944 if (mode != VOIDmode)
3945 target = gen_reg_rtx (mode);
3946 else
3947 target = gen_reg_rtx (GET_MODE (ret));
3949 if (GET_MODE (target) != GET_MODE (ret))
3950 ret = gen_lowpart (GET_MODE (target), ret);
3952 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3953 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3954 gcc_assert (ret);
3956 return target;
3961 return expand_movstr (dst, src, target,
3962 /*retmode=*/ RETURN_END_MINUS_ONE);
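/* An illustrative example of the constant-length path above: for

     char *p = stpcpy (d, "abc");

   the source length is known, so the call is expanded like
   mempcpy (d, "abc", 4) and P is set to D + 3, the address of the
   terminating NUL.  */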
3966 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3967 arguments while being careful to avoid duplicate warnings (which could
3968 be issued if the expansion were to fail, resulting in the call
3969 being emitted by expand_call ()). */
3971 static rtx
3972 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3974 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3976 /* The call has been successfully expanded. Check for nonstring
3977 arguments and issue warnings as appropriate. */
3978 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3979 return ret;
3982 return NULL_RTX;
3985 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3986 bytes from constant string DATA + OFFSET and return it as target
3987 constant. */
3989 static rtx
3990 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3991 fixed_size_mode mode)
3993 const char *str = (const char *) data;
3995 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3996 return const0_rtx;
3998 /* The by-pieces infrastructure does not try to pick a vector mode
3999 for strncpy expansion. */
4000 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
4003 /* Helper to check the sizes of sequences and the destination of calls
4004 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4005 success (no overflow or invalid sizes), false otherwise. */
4007 static bool
4008 check_strncat_sizes (tree exp, tree objsize)
4010 tree dest = CALL_EXPR_ARG (exp, 0);
4011 tree src = CALL_EXPR_ARG (exp, 1);
4012 tree maxread = CALL_EXPR_ARG (exp, 2);
4014 /* Try to determine the range of lengths that the source expression
4015 refers to. */
4016 c_strlen_data lendata = { };
4017 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4019 /* Try to verify that the destination is big enough for the shortest
4020 string. */
4022 access_data data (nullptr, exp, access_read_write, maxread, true);
4023 if (!objsize && warn_stringop_overflow)
4025 /* If it hasn't been provided by __strncat_chk, try to determine
4026 the size of the destination object into which the source is
4027 being copied. */
4028 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4031 /* Add one for the terminating nul. */
4032 tree srclen = (lendata.minlen
4033 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4034 size_one_node)
4035 : NULL_TREE);
4037 /* The strncat function copies at most MAXREAD bytes and always appends
4038 the terminating nul so the specified upper bound should never be equal
4039 to (or greater than) the size of the destination. */
4040 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4041 && tree_int_cst_equal (objsize, maxread))
4043 location_t loc = EXPR_LOCATION (exp);
4044 warning_at (loc, OPT_Wstringop_overflow_,
4045 "%qD specified bound %E equals destination size",
4046 get_callee_fndecl (exp), maxread);
4048 return false;
4051 if (!srclen
4052 || (maxread && tree_fits_uhwi_p (maxread)
4053 && tree_fits_uhwi_p (srclen)
4054 && tree_int_cst_lt (maxread, srclen)))
4055 srclen = maxread;
4057 /* The number of bytes to write is LEN but check_access will also
4058 check SRCLEN if LEN's value isn't known. */
4059 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4060 objsize, data.mode, &data);
4063 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4064 NULL_RTX if we failed; the caller should then emit a normal call. */
4066 static rtx
4067 expand_builtin_strncpy (tree exp, rtx target)
4069 location_t loc = EXPR_LOCATION (exp);
4071 if (!validate_arglist (exp,
4072 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4073 return NULL_RTX;
4074 tree dest = CALL_EXPR_ARG (exp, 0);
4075 tree src = CALL_EXPR_ARG (exp, 1);
4076 /* The number of bytes to write (not the maximum). */
4077 tree len = CALL_EXPR_ARG (exp, 2);
4079 /* The length of the source sequence. */
4080 tree slen = c_strlen (src, 1);
4082 /* We must be passed a constant len and src parameter. */
4083 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4084 return NULL_RTX;
4086 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4088 /* We're required to pad with trailing zeros if the requested
4089 len is greater than strlen(s2)+1. In that case try to
4090 use store_by_pieces; if that fails, punt. */
4091 if (tree_int_cst_lt (slen, len))
4093 unsigned int dest_align = get_pointer_alignment (dest);
4094 const char *p = c_getstr (src);
4095 rtx dest_mem;
4097 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4098 || !can_store_by_pieces (tree_to_uhwi (len),
4099 builtin_strncpy_read_str,
4100 CONST_CAST (char *, p),
4101 dest_align, false))
4102 return NULL_RTX;
4104 dest_mem = get_memory_rtx (dest, len);
4105 store_by_pieces (dest_mem, tree_to_uhwi (len),
4106 builtin_strncpy_read_str,
4107 CONST_CAST (char *, p), dest_align, false,
4108 RETURN_BEGIN);
4109 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4110 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4111 return dest_mem;
4114 return NULL_RTX;
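/* A small example of the padding case handled above: for

     strncpy (buf, "ab", 8);

   the requested length 8 exceeds strlen ("ab") + 1, so the expansion
   stores the bytes 'a', 'b' followed by six NULs via store_by_pieces
   (when can_store_by_pieces allows it).  */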
4117 /* Return the RTL of a register in MODE generated from PREV in the
4118 previous iteration. */
4120 static rtx
4121 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4123 rtx target = nullptr;
4124 if (prev != nullptr && prev->data != nullptr)
4126 /* Use the previous data in the same mode. */
4127 if (prev->mode == mode)
4128 return prev->data;
4130 fixed_size_mode prev_mode = prev->mode;
4132 /* Don't use the previous data to write QImode if it is in a
4133 vector mode. */
4134 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4135 return target;
4137 rtx prev_rtx = prev->data;
4139 if (REG_P (prev_rtx)
4140 && HARD_REGISTER_P (prev_rtx)
4141 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4143 /* This case occurs when PREV_MODE is a vector and when
4144 MODE is too small to store using vector operations.
4145 After register allocation, the code will need to move the
4146 lowpart of the vector register into a non-vector register.
4148 Also, the target has chosen to use a hard register
4149 instead of going with the default choice of using a
4150 pseudo register. We should respect that choice and try to
4151 avoid creating a pseudo register with the same mode as the
4152 current hard register.
4154 In principle, we could just use a lowpart MODE subreg of
4155 the vector register. However, the vector register mode might
4156 be too wide for non-vector registers, and we already know
4157 that the non-vector mode is too small for vector registers.
4158 It's therefore likely that we'd need to spill to memory in
4159 the vector mode and reload the non-vector value from there.
4161 Try to avoid that by reducing the vector register to the
4162 smallest size that it can hold. This should increase the
4163 chances that non-vector registers can hold both the inner
4164 and outer modes of the subreg that we generate later. */
4165 machine_mode m;
4166 fixed_size_mode candidate;
4167 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4168 if (is_a<fixed_size_mode> (m, &candidate))
4170 if (GET_MODE_SIZE (candidate)
4171 >= GET_MODE_SIZE (prev_mode))
4172 break;
4173 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4174 && lowpart_subreg_regno (REGNO (prev_rtx),
4175 prev_mode, candidate) >= 0)
4177 target = lowpart_subreg (candidate, prev_rtx,
4178 prev_mode);
4179 prev_rtx = target;
4180 prev_mode = candidate;
4181 break;
4184 if (target == nullptr)
4185 prev_rtx = copy_to_reg (prev_rtx);
4188 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4190 return target;
4193 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4194 bytes from constant string DATA + OFFSET and return it as target
4195 constant. If PREV isn't nullptr, it has the RTL info from the
4196 previous iteration. */
4199 builtin_memset_read_str (void *data, void *prev,
4200 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4201 fixed_size_mode mode)
4203 const char *c = (const char *) data;
4204 unsigned int size = GET_MODE_SIZE (mode);
4206 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4207 mode);
4208 if (target != nullptr)
4209 return target;
4210 rtx src = gen_int_mode (*c, QImode);
4212 if (VECTOR_MODE_P (mode))
4214 gcc_assert (GET_MODE_INNER (mode) == QImode);
4216 rtx const_vec = gen_const_vec_duplicate (mode, src);
4217 if (prev == NULL)
4218 /* Return CONST_VECTOR when called by a query function. */
4219 return const_vec;
4221 /* Use the move expander with CONST_VECTOR. */
4222 target = gen_reg_rtx (mode);
4223 emit_move_insn (target, const_vec);
4224 return target;
4227 char *p = XALLOCAVEC (char, size);
4229 memset (p, *c, size);
4231 /* Vector modes should be handled above. */
4232 return c_readstr (p, as_a <scalar_int_mode> (mode));
4235 /* Callback routine for store_by_pieces. Return the RTL of a register
4236 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4237 char value given in the RTL register data. For example, if mode is
4238 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4239 nullptr, it has the RTL info from the previous iteration. */
4241 static rtx
4242 builtin_memset_gen_str (void *data, void *prev,
4243 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4244 fixed_size_mode mode)
4246 rtx target, coeff;
4247 size_t size;
4248 char *p;
4250 size = GET_MODE_SIZE (mode);
4251 if (size == 1)
4252 return (rtx) data;
4254 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4255 if (target != nullptr)
4256 return target;
4258 if (VECTOR_MODE_P (mode))
4260 gcc_assert (GET_MODE_INNER (mode) == QImode);
4262 /* A vec_duplicate_optab handler is a precondition for picking a vector
4263 mode in the memset expander, so ICODE is valid here. */
4264 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4266 target = gen_reg_rtx (mode);
4267 class expand_operand ops[2];
4268 create_output_operand (&ops[0], target, mode);
4269 create_input_operand (&ops[1], (rtx) data, QImode);
4270 expand_insn (icode, 2, ops);
4271 if (!rtx_equal_p (target, ops[0].value))
4272 emit_move_insn (target, ops[0].value);
4274 return target;
4277 p = XALLOCAVEC (char, size);
4278 memset (p, 1, size);
4279 /* Vector modes should be handled above. */
4280 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
4282 target = convert_to_mode (mode, (rtx) data, 1);
4283 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4284 return force_reg (mode, target);
4287 /* Expand expression EXP, which is a call to the memset builtin. Return
4288 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
4289 try to get the result in TARGET, if convenient (and in mode MODE if that's
4290 convenient). */
4292 static rtx
4293 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4295 if (!validate_arglist (exp,
4296 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4297 return NULL_RTX;
4299 tree dest = CALL_EXPR_ARG (exp, 0);
4300 tree val = CALL_EXPR_ARG (exp, 1);
4301 tree len = CALL_EXPR_ARG (exp, 2);
4303 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4306 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4307 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4308 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4309 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4311 The strategy is to issue one store_by_pieces for each power of two,
4312 from most to least significant, guarded by a test on whether there
4313 are at least that many bytes left to copy in LEN.
4315 ??? Should we skip some powers of two in favor of loops? Maybe start
4316 at the max of TO/LEN/word alignment, at least when optimizing for
4317 size, instead of ensuring O(log len) dynamic compares? */
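/* A worked example of the strategy, assuming CTZ_LEN == 0, byte alignment,
   and a length known to lie in [0, 7]: no leading fixed-size block is
   issued, and the loop below emits up to three guarded stores,

     if (len >= 4) store 4 bytes;
     if (len >= 2) store 2 bytes;
     if (len >= 1) store 1 byte;

   adjusting the destination pointer and the remaining length after the
   4- and 2-byte stores.  */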
4319 bool
4320 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4321 unsigned HOST_WIDE_INT min_len,
4322 unsigned HOST_WIDE_INT max_len,
4323 rtx val, char valc, unsigned int align)
4325 int max_bits = floor_log2 (max_len);
4326 int min_bits = floor_log2 (min_len);
4327 int sctz_len = ctz_len;
4329 gcc_checking_assert (sctz_len >= 0);
4331 if (val)
4332 valc = 1;
4334 /* Bits more significant than TST_BITS are part of the shared prefix
4335 in the binary representation of both min_len and max_len. Since
4336 they're identical, we don't need to test them in the loop. */
4337 int tst_bits = (max_bits != min_bits ? max_bits
4338 : floor_log2 (max_len ^ min_len));
4340 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4341 bytes, to lower max_bits. In the unlikely case of a constant LEN
4342 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4343 single store_by_pieces, but otherwise select the minimum multiple
4344 of the ALIGN (in bytes) and of the GCD of the possible LENs that
4345 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4346 unsigned HOST_WIDE_INT blksize;
4347 if (max_len > min_len)
4349 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4350 align / BITS_PER_UNIT);
4351 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4352 blksize &= ~(alrng - 1);
4354 else if (max_len == min_len)
4355 blksize = max_len;
4356 else
4357 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4358 return false;
4359 if (min_len >= blksize)
4361 min_len -= blksize;
4362 min_bits = floor_log2 (min_len);
4363 max_len -= blksize;
4364 max_bits = floor_log2 (max_len);
4366 tst_bits = (max_bits != min_bits ? max_bits
4367 : floor_log2 (max_len ^ min_len));
4369 else
4370 blksize = 0;
4372 /* Check that we can use store by pieces for the maximum store count
4373 we may issue (initial fixed-size block, plus conditional
4374 power-of-two-sized stores from max_bits down to ctz_len). */
4375 unsigned HOST_WIDE_INT xlenest = blksize;
4376 if (max_bits >= 0)
4377 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4378 - (HOST_WIDE_INT_1U << ctz_len));
4379 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4380 &valc, align, true))
4381 return false;
4383 by_pieces_constfn constfun;
4384 void *constfundata;
4385 if (val)
4387 constfun = builtin_memset_gen_str;
4388 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4389 val);
4391 else
4393 constfun = builtin_memset_read_str;
4394 constfundata = &valc;
4397 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4398 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4399 to = replace_equiv_address (to, ptr);
4400 set_mem_align (to, align);
4402 if (blksize)
4404 to = store_by_pieces (to, blksize,
4405 constfun, constfundata,
4406 align, true,
4407 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4408 if (max_len == 0)
4409 return true;
4411 /* Adjust PTR, TO and REM. Since TO's address is likely
4412 PTR+offset, we have to replace it. */
4413 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4414 to = replace_equiv_address (to, ptr);
4415 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4416 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4419 /* Iterate over power-of-two block sizes from the maximum length to
4420 the least significant bit possibly set in the length. */
4421 for (int i = max_bits; i >= sctz_len; i--)
4423 rtx_code_label *label = NULL;
4424 blksize = HOST_WIDE_INT_1U << i;
4426 /* If we're past the bits shared between min_ and max_len, expand
4427 a test on the dynamic length, comparing it with the
4428 BLKSIZE. */
4429 if (i <= tst_bits)
4431 label = gen_label_rtx ();
4432 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4433 ptr_mode, 1, label,
4434 profile_probability::even ());
4436 /* If we are at a bit that is in the prefix shared by min_ and
4437 max_len, skip this BLKSIZE if the bit is clear. */
4438 else if ((max_len & blksize) == 0)
4439 continue;
4441 /* Issue a store of BLKSIZE bytes. */
4442 to = store_by_pieces (to, blksize,
4443 constfun, constfundata,
4444 align, true,
4445 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4447 /* Adjust REM and PTR, unless this is the last iteration. */
4448 if (i != sctz_len)
4450 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4451 to = replace_equiv_address (to, ptr);
4452 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4453 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4456 if (label)
4458 emit_label (label);
4460 /* Given conditional stores, the offset can no longer be
4461 known, so clear it. */
4462 clear_mem_offset (to);
4466 return true;
4469 /* Helper function to do the actual work for expand_builtin_memset. The
4470 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4471 so that this can also be called without constructing an actual CALL_EXPR.
4472 The other arguments and return value are the same as for
4473 expand_builtin_memset. */
4475 static rtx
4476 expand_builtin_memset_args (tree dest, tree val, tree len,
4477 rtx target, machine_mode mode, tree orig_exp)
4479 tree fndecl, fn;
4480 enum built_in_function fcode;
4481 machine_mode val_mode;
4482 char c;
4483 unsigned int dest_align;
4484 rtx dest_mem, dest_addr, len_rtx;
4485 HOST_WIDE_INT expected_size = -1;
4486 unsigned int expected_align = 0;
4487 unsigned HOST_WIDE_INT min_size;
4488 unsigned HOST_WIDE_INT max_size;
4489 unsigned HOST_WIDE_INT probable_max_size;
4491 dest_align = get_pointer_alignment (dest);
4493 /* If DEST is not a pointer type, don't do this operation in-line. */
4494 if (dest_align == 0)
4495 return NULL_RTX;
4497 if (currently_expanding_gimple_stmt)
4498 stringop_block_profile (currently_expanding_gimple_stmt,
4499 &expected_align, &expected_size);
4501 if (expected_align < dest_align)
4502 expected_align = dest_align;
4504 /* If the LEN parameter is zero, return DEST. */
4505 if (integer_zerop (len))
4507 /* Evaluate and ignore VAL in case it has side-effects. */
4508 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4509 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4512 /* Stabilize the arguments in case we fail. */
4513 dest = builtin_save_expr (dest);
4514 val = builtin_save_expr (val);
4515 len = builtin_save_expr (len);
4517 len_rtx = expand_normal (len);
4518 determine_block_size (len, len_rtx, &min_size, &max_size,
4519 &probable_max_size);
4520 dest_mem = get_memory_rtx (dest, len);
4521 val_mode = TYPE_MODE (unsigned_char_type_node);
4523 if (TREE_CODE (val) != INTEGER_CST
4524 || target_char_cast (val, &c))
4526 rtx val_rtx;
4528 val_rtx = expand_normal (val);
4529 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4531 /* Assume that we can memset by pieces if we can store
4532 the coefficients by pieces (in the required modes).
4533 We can't pass builtin_memset_gen_str as that emits RTL. */
4534 c = 1;
4535 if (tree_fits_uhwi_p (len)
4536 && can_store_by_pieces (tree_to_uhwi (len),
4537 builtin_memset_read_str, &c, dest_align,
4538 true))
4540 val_rtx = force_reg (val_mode, val_rtx);
4541 store_by_pieces (dest_mem, tree_to_uhwi (len),
4542 builtin_memset_gen_str, val_rtx, dest_align,
4543 true, RETURN_BEGIN);
4545 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4546 dest_align, expected_align,
4547 expected_size, min_size, max_size,
4548 probable_max_size)
4549 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4550 tree_ctz (len),
4551 min_size, max_size,
4552 val_rtx, 0,
4553 dest_align))
4554 goto do_libcall;
4556 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4557 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4558 return dest_mem;
4561 if (c)
4563 if (tree_fits_uhwi_p (len)
4564 && can_store_by_pieces (tree_to_uhwi (len),
4565 builtin_memset_read_str, &c, dest_align,
4566 true))
4567 store_by_pieces (dest_mem, tree_to_uhwi (len),
4568 builtin_memset_read_str, &c, dest_align, true,
4569 RETURN_BEGIN);
4570 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4571 gen_int_mode (c, val_mode),
4572 dest_align, expected_align,
4573 expected_size, min_size, max_size,
4574 probable_max_size)
4575 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4576 tree_ctz (len),
4577 min_size, max_size,
4578 NULL_RTX, c,
4579 dest_align))
4580 goto do_libcall;
4582 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4583 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4584 return dest_mem;
4587 set_mem_align (dest_mem, dest_align);
4588 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4589 CALL_EXPR_TAILCALL (orig_exp)
4590 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4591 expected_align, expected_size,
4592 min_size, max_size,
4593 probable_max_size, tree_ctz (len));
4595 if (dest_addr == 0)
4597 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4598 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4601 return dest_addr;
4603 do_libcall:
4604 fndecl = get_callee_fndecl (orig_exp);
4605 fcode = DECL_FUNCTION_CODE (fndecl);
4606 if (fcode == BUILT_IN_MEMSET)
4607 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4608 dest, val, len);
4609 else if (fcode == BUILT_IN_BZERO)
4610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4611 dest, len);
4612 else
4613 gcc_unreachable ();
4614 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4615 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4616 return expand_call (fn, target, target == const0_rtx);
4619 /* Expand expression EXP, which is a call to the bzero builtin. Return
4620 NULL_RTX if we failed; the caller should then emit a normal call. */
4622 static rtx
4623 expand_builtin_bzero (tree exp)
4625 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4626 return NULL_RTX;
4628 tree dest = CALL_EXPR_ARG (exp, 0);
4629 tree size = CALL_EXPR_ARG (exp, 1);
4631 /* New argument list transforming bzero (ptr x, int y) to
4632 memset (ptr x, int 0, size_t y). This is done this way
4633 so that if it isn't expanded inline, we fall back to
4634 calling bzero instead of memset. */
4636 location_t loc = EXPR_LOCATION (exp);
4638 return expand_builtin_memset_args (dest, integer_zero_node,
4639 fold_convert_loc (loc,
4640 size_type_node, size),
4641 const0_rtx, VOIDmode, exp);
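/* In other words, an illustrative call

     bzero (p, n);

   is expanded here as memset (p, 0, (size_t) n), with the original
   bzero call kept as the fallback if inline expansion fails.  */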
4644 /* Try to expand cmpstr operation ICODE with the given operands.
4645 Return the result rtx on success, otherwise return null. */
4647 static rtx
4648 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4649 HOST_WIDE_INT align)
4651 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4653 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4654 target = NULL_RTX;
4656 class expand_operand ops[4];
4657 create_output_operand (&ops[0], target, insn_mode);
4658 create_fixed_operand (&ops[1], arg1_rtx);
4659 create_fixed_operand (&ops[2], arg2_rtx);
4660 create_integer_operand (&ops[3], align);
4661 if (maybe_expand_insn (icode, 4, ops))
4662 return ops[0].value;
4663 return NULL_RTX;
4666 /* Expand expression EXP, which is a call to the memcmp built-in function.
4667 Return NULL_RTX if we failed and the caller should emit a normal call,
4668 otherwise try to get the result in TARGET, if convenient.
4669 RESULT_EQ is true if we can relax the returned value to be either zero
4670 or nonzero, without caring about the sign. */
4672 static rtx
4673 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4675 if (!validate_arglist (exp,
4676 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4677 return NULL_RTX;
4679 tree arg1 = CALL_EXPR_ARG (exp, 0);
4680 tree arg2 = CALL_EXPR_ARG (exp, 1);
4681 tree len = CALL_EXPR_ARG (exp, 2);
4683 /* Due to the performance benefit, always inline the calls first
4684 when result_eq is false. */
4685 rtx result = NULL_RTX;
4686 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4687 if (!result_eq && fcode != BUILT_IN_BCMP)
4689 result = inline_expand_builtin_bytecmp (exp, target);
4690 if (result)
4691 return result;
4694 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4695 location_t loc = EXPR_LOCATION (exp);
4697 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4698 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4700 /* If we don't have POINTER_TYPE, call the function. */
4701 if (arg1_align == 0 || arg2_align == 0)
4702 return NULL_RTX;
4704 rtx arg1_rtx = get_memory_rtx (arg1, len);
4705 rtx arg2_rtx = get_memory_rtx (arg2, len);
4706 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4708 /* Set MEM_SIZE as appropriate. */
4709 if (CONST_INT_P (len_rtx))
4711 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4712 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4715 by_pieces_constfn constfn = NULL;
4717 /* Try to get the byte representation of the constant ARG2 (or, only
4718 when the function's result is used for equality to zero, ARG1)
4719 points to, with its byte size in NBYTES. */
4720 unsigned HOST_WIDE_INT nbytes;
4721 const char *rep = getbyterep (arg2, &nbytes);
4722 if (result_eq && rep == NULL)
4724 /* For equality to zero the arguments are interchangeable. */
4725 rep = getbyterep (arg1, &nbytes);
4726 if (rep != NULL)
4727 std::swap (arg1_rtx, arg2_rtx);
4730 /* If the function's constant bound LEN_RTX is less than or equal
4731 to the byte size of the representation of the constant argument,
4732 and if the block comparison would be done by pieces, we can avoid loading
4733 the bytes from memory and compare directly against the constant bytes. */
4734 if (rep
4735 && CONST_INT_P (len_rtx)
4736 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4737 constfn = builtin_memcpy_read_str;
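/* For instance (illustrative only), a call like

     memcmp (buf, "abcd", 4)

   can then be compared by pieces against the constant bytes of "abcd"
   without emitting a load from the string literal.  */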
4739 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4740 TREE_TYPE (len), target,
4741 result_eq, constfn,
4742 CONST_CAST (char *, rep));
4744 if (result)
4746 /* Return the value in the proper mode for this function. */
4747 if (GET_MODE (result) == mode)
4748 return result;
4750 if (target != 0)
4752 convert_move (target, result, 0);
4753 return target;
4756 return convert_to_mode (mode, result, 0);
4759 return NULL_RTX;
4762 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4763 if we failed; the caller should then emit a normal call. Otherwise try to get
4764 the result in TARGET, if convenient. */
4766 static rtx
4767 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4769 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4770 return NULL_RTX;
4772 tree arg1 = CALL_EXPR_ARG (exp, 0);
4773 tree arg2 = CALL_EXPR_ARG (exp, 1);
4775 /* Due to the performance benefit, always inline the calls first. */
4776 rtx result = NULL_RTX;
4777 result = inline_expand_builtin_bytecmp (exp, target);
4778 if (result)
4779 return result;
4781 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4782 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4783 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4784 return NULL_RTX;
4786 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4787 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4789 /* If we don't have POINTER_TYPE, call the function. */
4790 if (arg1_align == 0 || arg2_align == 0)
4791 return NULL_RTX;
4793 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4794 arg1 = builtin_save_expr (arg1);
4795 arg2 = builtin_save_expr (arg2);
4797 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4798 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4800 /* Try to call cmpstrsi. */
4801 if (cmpstr_icode != CODE_FOR_nothing)
4802 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4803 MIN (arg1_align, arg2_align));
4805 /* Try to determine at least one length and call cmpstrnsi. */
4806 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4808 tree len;
4809 rtx arg3_rtx;
4811 tree len1 = c_strlen (arg1, 1);
4812 tree len2 = c_strlen (arg2, 1);
4814 if (len1)
4815 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4816 if (len2)
4817 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4819 /* If we don't have a constant length for the first, use the length
4820 of the second, if we know it. We don't require a constant for
4821 this case; some cost analysis could be done if both are available
4822 but neither is constant. For now, assume they're equally cheap,
4823 unless one has side effects. If both strings have constant lengths,
4824 use the smaller. */
4826 if (!len1)
4827 len = len2;
4828 else if (!len2)
4829 len = len1;
4830 else if (TREE_SIDE_EFFECTS (len1))
4831 len = len2;
4832 else if (TREE_SIDE_EFFECTS (len2))
4833 len = len1;
4834 else if (TREE_CODE (len1) != INTEGER_CST)
4835 len = len2;
4836 else if (TREE_CODE (len2) != INTEGER_CST)
4837 len = len1;
4838 else if (tree_int_cst_lt (len1, len2))
4839 len = len1;
4840 else
4841 len = len2;
4843 /* If both arguments have side effects, we cannot optimize. */
4844 if (len && !TREE_SIDE_EFFECTS (len))
4846 arg3_rtx = expand_normal (len);
4847 result = expand_cmpstrn_or_cmpmem
4848 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4849 arg3_rtx, MIN (arg1_align, arg2_align));
4853 tree fndecl = get_callee_fndecl (exp);
4854 if (result)
4856 /* Return the value in the proper mode for this function. */
4857 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4858 if (GET_MODE (result) == mode)
4859 return result;
4860 if (target == 0)
4861 return convert_to_mode (mode, result, 0);
4862 convert_move (target, result, 0);
4863 return target;
4866 /* Expand the library call ourselves using a stabilized argument
4867 list to avoid evaluating the function's arguments twice. */
4868 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4869 copy_warning (fn, exp);
4870 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4871 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4872 return expand_call (fn, target, target == const0_rtx);
4875 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4876 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
4877 try to get the result in TARGET, if convenient. */
4879 static rtx
4880 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4881 ATTRIBUTE_UNUSED machine_mode mode)
4883 if (!validate_arglist (exp,
4884 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4887 tree arg1 = CALL_EXPR_ARG (exp, 0);
4888 tree arg2 = CALL_EXPR_ARG (exp, 1);
4889 tree arg3 = CALL_EXPR_ARG (exp, 2);
4891 location_t loc = EXPR_LOCATION (exp);
4892 tree len1 = c_strlen (arg1, 1);
4893 tree len2 = c_strlen (arg2, 1);
4895 /* Due to the performance benefit, always inline the calls first. */
4896 rtx result = NULL_RTX;
4897 result = inline_expand_builtin_bytecmp (exp, target);
4898 if (result)
4899 return result;
4901 /* If c_strlen can determine an expression for one of the string
4902 lengths, and it doesn't have side effects, then emit cmpstrnsi
4903 using length MIN(strlen(string)+1, arg3). */
4904 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4905 if (cmpstrn_icode == CODE_FOR_nothing)
4906 return NULL_RTX;
4908 tree len;
4910 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4911 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4913 if (len1)
4914 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4915 if (len2)
4916 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4918 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4920 /* If we don't have a constant length for the first, use the length
4921 of the second, if we know it. If neither string is constant length,
4922 use the given length argument. We don't require a constant for
4923 this case; some cost analysis could be done if both are available
4924 but neither is constant. For now, assume they're equally cheap,
4925 unless one has side effects. If both strings have constant lengths,
4926 use the smaller. */
4928 if (!len1 && !len2)
4929 len = len3;
4930 else if (!len1)
4931 len = len2;
4932 else if (!len2)
4933 len = len1;
4934 else if (TREE_SIDE_EFFECTS (len1))
4935 len = len2;
4936 else if (TREE_SIDE_EFFECTS (len2))
4937 len = len1;
4938 else if (TREE_CODE (len1) != INTEGER_CST)
4939 len = len2;
4940 else if (TREE_CODE (len2) != INTEGER_CST)
4941 len = len1;
4942 else if (tree_int_cst_lt (len1, len2))
4943 len = len1;
4944 else
4945 len = len2;
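/* As an illustrative example of the selection above: for

     strncmp (s, "hi", n)

   with a non-constant S, only len2 == strlen ("hi") + 1 == 3 is known,
   so LEN is 3 here and is combined with the bound N below.  */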
4947 /* If we are not using the given length, we must incorporate it here.
4948 The actual new length parameter will be MIN(len,arg3) in this case. */
4949 if (len != len3)
4951 len = fold_convert_loc (loc, sizetype, len);
4952 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4954 rtx arg1_rtx = get_memory_rtx (arg1, len);
4955 rtx arg2_rtx = get_memory_rtx (arg2, len);
4956 rtx arg3_rtx = expand_normal (len);
4957 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4958 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4959 MIN (arg1_align, arg2_align));
4961 tree fndecl = get_callee_fndecl (exp);
4962 if (result)
4964 /* Return the value in the proper mode for this function. */
4965 mode = TYPE_MODE (TREE_TYPE (exp));
4966 if (GET_MODE (result) == mode)
4967 return result;
4968 if (target == 0)
4969 return convert_to_mode (mode, result, 0);
4970 convert_move (target, result, 0);
4971 return target;
4974 /* Expand the library call ourselves using a stabilized argument
4975 list to avoid evaluating the function's arguments twice. */
4976 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4977 copy_warning (call, exp);
4978 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4979 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4980 return expand_call (call, target, target == const0_rtx);
4983 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4984 if that's convenient. */
4986 rtx
4987 expand_builtin_saveregs (void)
4989 rtx val;
4990 rtx_insn *seq;
4992 /* Don't do __builtin_saveregs more than once in a function.
4993 Save the result of the first call and reuse it. */
4994 if (saveregs_value != 0)
4995 return saveregs_value;
4997 /* When this function is called, it means that registers must be
4998 saved on entry to this function. So we migrate the call to the
4999 first insn of this function. */
5001 start_sequence ();
5003 /* Do whatever the machine needs done in this case. */
5004 val = targetm.calls.expand_builtin_saveregs ();
5006 seq = get_insns ();
5007 end_sequence ();
5009 saveregs_value = val;
5011 /* Put the insns after the NOTE that starts the function. If this
5012 is inside a start_sequence, make the outer-level insn chain current, so
5013 the code is placed at the start of the function. */
5014 push_topmost_sequence ();
5015 emit_insn_after (seq, entry_of_function ());
5016 pop_topmost_sequence ();
5018 return val;
5021 /* Expand a call to __builtin_next_arg. */
5023 static rtx
5024 expand_builtin_next_arg (void)
5026 /* Checking arguments is already done in fold_builtin_next_arg, which
5027 must be called before this function. */
5028 return expand_binop (ptr_mode, add_optab,
5029 crtl->args.internal_arg_pointer,
5030 crtl->args.arg_offset_rtx,
5031 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5034 /* Make it easier for the backends by protecting the valist argument
5035 from multiple evaluations. */
5037 static tree
5038 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5040 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5042 /* The current way of determining the type of valist is completely
5043 bogus. We should have the information on the va builtin instead. */
5044 if (!vatype)
5045 vatype = targetm.fn_abi_va_list (cfun->decl);
5047 if (TREE_CODE (vatype) == ARRAY_TYPE)
5049 if (TREE_SIDE_EFFECTS (valist))
5050 valist = save_expr (valist);
5052 /* For this case, the backends will be expecting a pointer to
5053 vatype, but it's possible we've actually been given an array
5054 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5055 So fix it. */
5056 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5058 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5059 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5062 else
5064 tree pt = build_pointer_type (vatype);
5066 if (! needs_lvalue)
5068 if (! TREE_SIDE_EFFECTS (valist))
5069 return valist;
5071 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5072 TREE_SIDE_EFFECTS (valist) = 1;
5075 if (TREE_SIDE_EFFECTS (valist))
5076 valist = save_expr (valist);
5077 valist = fold_build2_loc (loc, MEM_REF,
5078 vatype, valist, build_int_cst (pt, 0));
5081 return valist;
5084 /* The "standard" definition of va_list is void*. */
5086 tree
5087 std_build_builtin_va_list (void)
5089 return ptr_type_node;
5092 /* The "standard" abi va_list is va_list_type_node. */
5094 tree
5095 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5097 return va_list_type_node;
5100 /* The "standard" type of va_list is va_list_type_node. */
5102 tree
5103 std_canonical_va_list_type (tree type)
5105 tree wtype, htype;
5107 wtype = va_list_type_node;
5108 htype = type;
5110 if (TREE_CODE (wtype) == ARRAY_TYPE)
5112 /* If va_list is an array type, the argument may have decayed
5113 to a pointer type, e.g. by being passed to another function.
5114 In that case, unwrap both types so that we can compare the
5115 underlying records. */
5116 if (TREE_CODE (htype) == ARRAY_TYPE
5117 || POINTER_TYPE_P (htype))
5119 wtype = TREE_TYPE (wtype);
5120 htype = TREE_TYPE (htype);
5123 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5124 return va_list_type_node;
5126 return NULL_TREE;
5129 /* The "standard" implementation of va_start: just assign `nextarg' to
5130 the variable. */
5132 void
5133 std_expand_builtin_va_start (tree valist, rtx nextarg)
5135 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5136 convert_move (va_r, nextarg, 0);
5139 /* Expand EXP, a call to __builtin_va_start. */
5141 static rtx
5142 expand_builtin_va_start (tree exp)
5144 rtx nextarg;
5145 tree valist;
5146 location_t loc = EXPR_LOCATION (exp);
5148 if (call_expr_nargs (exp) < 2)
5150 error_at (loc, "too few arguments to function %<va_start%>");
5151 return const0_rtx;
5154 if (fold_builtin_next_arg (exp, true))
5155 return const0_rtx;
5157 nextarg = expand_builtin_next_arg ();
5158 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5160 if (targetm.expand_builtin_va_start)
5161 targetm.expand_builtin_va_start (valist, nextarg);
5162 else
5163 std_expand_builtin_va_start (valist, nextarg);
5165 return const0_rtx;
5168 /* Expand EXP, a call to __builtin_va_end. */
5170 static rtx
5171 expand_builtin_va_end (tree exp)
5173 tree valist = CALL_EXPR_ARG (exp, 0);
5175 /* Evaluate for side effects, if needed. I hate macros that don't
5176 do that. */
5177 if (TREE_SIDE_EFFECTS (valist))
5178 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5180 return const0_rtx;
5183 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5184 builtin rather than just as an assignment in stdarg.h because of the
5185 nastiness of array-type va_list types. */
5187 static rtx
5188 expand_builtin_va_copy (tree exp)
5190 tree dst, src, t;
5191 location_t loc = EXPR_LOCATION (exp);
5193 dst = CALL_EXPR_ARG (exp, 0);
5194 src = CALL_EXPR_ARG (exp, 1);
5196 dst = stabilize_va_list_loc (loc, dst, 1);
5197 src = stabilize_va_list_loc (loc, src, 0);
5199 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5201 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5203 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5204 TREE_SIDE_EFFECTS (t) = 1;
5205 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5207 else
5209 rtx dstb, srcb, size;
5211 /* Evaluate to pointers. */
5212 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5213 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5214 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5215 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5217 dstb = convert_memory_address (Pmode, dstb);
5218 srcb = convert_memory_address (Pmode, srcb);
5220 /* "Dereference" to BLKmode memories. */
5221 dstb = gen_rtx_MEM (BLKmode, dstb);
5222 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5223 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5224 srcb = gen_rtx_MEM (BLKmode, srcb);
5225 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5226 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5228 /* Copy. */
5229 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5232 return const0_rtx;
5235 /* Expand a call to one of the builtin functions __builtin_frame_address or
5236 __builtin_return_address. */
5238 static rtx
5239 expand_builtin_frame_address (tree fndecl, tree exp)
5241 /* The argument must be a nonnegative integer constant.
5242 It counts the number of frames to scan up the stack.
5243 The value is either the frame pointer value or the return
5244 address saved in that frame. */
5245 if (call_expr_nargs (exp) == 0)
5246 /* Warning about missing arg was already issued. */
5247 return const0_rtx;
5248 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5250 error ("invalid argument to %qD", fndecl);
5251 return const0_rtx;
5253 else
5255 /* Number of frames to scan up the stack. */
5256 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5258 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5260 /* Some ports cannot access arbitrary stack frames. */
5261 if (tem == NULL)
5263 warning (0, "unsupported argument to %qD", fndecl);
5264 return const0_rtx;
5267 if (count)
5269 /* Warn since no effort is made to ensure that any frame
5270 beyond the current one exists or can be safely reached. */
5271 warning (OPT_Wframe_address, "calling %qD with "
5272 "a nonzero argument is unsafe", fndecl);
5275 /* For __builtin_frame_address, return what we've got. */
5276 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5277 return tem;
5279 if (!REG_P (tem)
5280 && ! CONSTANT_P (tem))
5281 tem = copy_addr_to_reg (tem);
5282 return tem;
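/* Source-level sketch of the builtins handled above (typical usage, not
   taken from any particular testcase):

     void *fp = __builtin_frame_address (0);
     void *ra = __builtin_return_address (0);
     void *caller_ra = __builtin_return_address (1);

   The last call triggers the -Wframe-address warning above, since frames
   beyond the current one may not be safely reachable.  */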
5286 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5287 failed and the caller should emit a normal call. */
5289 static rtx
5290 expand_builtin_alloca (tree exp)
5292 rtx op0;
5293 rtx result;
5294 unsigned int align;
5295 tree fndecl = get_callee_fndecl (exp);
5296 HOST_WIDE_INT max_size;
5297 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5298 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5299 bool valid_arglist
5300 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5301 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5302 VOID_TYPE)
5303 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5304 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5305 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5307 if (!valid_arglist)
5308 return NULL_RTX;
5310 /* Compute the argument. */
5311 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5313 /* Compute the alignment. */
5314 align = (fcode == BUILT_IN_ALLOCA
5315 ? BIGGEST_ALIGNMENT
5316 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5318 /* Compute the maximum size. */
5319 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5320 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5321 : -1);
5323 /* Allocate the desired space. If the allocation stems from the declaration
5324 of a variable-sized object, it cannot accumulate. */
5325 result
5326 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5327 result = convert_memory_address (ptr_mode, result);
5329 /* Dynamic allocations for variables are recorded during gimplification. */
5330 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5331 record_dynamic_alloc (exp);
5333 return result;
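/* A sketch of the three source forms dispatched above (argument values
   are illustrative only):

     p = __builtin_alloca (n);
     p = __builtin_alloca_with_align (n, 256);
     p = __builtin_alloca_with_align_and_max (n, 256, 4096);

   The alignment argument of the _with_align forms is expressed in bits;
   the extra argument of the _and_max form is an upper bound on the
   allocation size.  */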
6336 /* Emit a call to __asan_allocas_unpoison for EXP.  Add to the second argument
6337 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6338 STACK_DYNAMIC_OFFSET value.  See the motivation for this in the comment on
6339 the handle_builtin_stack_restore function. */

5341 static rtx
5342 expand_asan_emit_allocas_unpoison (tree exp)
5344 tree arg0 = CALL_EXPR_ARG (exp, 0);
5345 tree arg1 = CALL_EXPR_ARG (exp, 1);
5346 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5347 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5348 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5349 stack_pointer_rtx, NULL_RTX, 0,
5350 OPTAB_LIB_WIDEN);
5351 off = convert_modes (ptr_mode, Pmode, off, 0);
5352 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5353 OPTAB_LIB_WIDEN);
5354 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5355 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5356 top, ptr_mode, bot, ptr_mode);
5357 return ret;
5360 /* Expand a call to bswap builtin in EXP.
5361 Return NULL_RTX if a normal call should be emitted rather than expanding the
5362 function in-line. If convenient, the result should be placed in TARGET.
5363 SUBTARGET may be used as the target for computing one of EXP's operands. */
5365 static rtx
5366 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5367 rtx subtarget)
5369 tree arg;
5370 rtx op0;
5372 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5373 return NULL_RTX;
5375 arg = CALL_EXPR_ARG (exp, 0);
5376 op0 = expand_expr (arg,
5377 subtarget && GET_MODE (subtarget) == target_mode
5378 ? subtarget : NULL_RTX,
5379 target_mode, EXPAND_NORMAL);
5380 if (GET_MODE (op0) != target_mode)
5381 op0 = convert_to_mode (target_mode, op0, 1);
5383 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5385 gcc_assert (target);
5387 return convert_to_mode (target_mode, target, 1);
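/* For example, __builtin_bswap32 (0x11223344) yields 0x44332211; the
   expansion above simply pushes the operand through bswap_optab in the
   requested mode.  */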
5390 /* Expand a call to a unary builtin in EXP.
5391 Return NULL_RTX if a normal call should be emitted rather than expanding the
5392 function in-line. If convenient, the result should be placed in TARGET.
5393 SUBTARGET may be used as the target for computing one of EXP's operands. */
5395 static rtx
5396 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5397 rtx subtarget, optab op_optab)
5399 rtx op0;
5401 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5402 return NULL_RTX;
5404 /* Compute the argument. */
5405 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5406 (subtarget
5407 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5408 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5409 VOIDmode, EXPAND_NORMAL);
5410 /* Compute op, into TARGET if possible.
5411 Set TARGET to wherever the result comes back. */
5412 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5413 op_optab, op0, target, op_optab != clrsb_optab);
5414 gcc_assert (target);
5416 return convert_to_mode (target_mode, target, 0);
5419 /* Expand a call to __builtin_expect. We just return our argument,
5420 as the builtin_expect semantics should already have been applied by
5421 the tree branch prediction pass. */
5423 static rtx
5424 expand_builtin_expect (tree exp, rtx target)
5426 tree arg;
5428 if (call_expr_nargs (exp) < 2)
5429 return const0_rtx;
5430 arg = CALL_EXPR_ARG (exp, 0);
5432 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5433 /* When guessing was done, the hints should already have been stripped away. */
5434 gcc_assert (!flag_guess_branch_prob
5435 || optimize == 0 || seen_error ());
5436 return target;
5439 /* Expand a call to __builtin_expect_with_probability. We just return our
5440 argument, as the builtin_expect semantics should already have been applied
5441 by the tree branch prediction pass. */
5443 static rtx
5444 expand_builtin_expect_with_probability (tree exp, rtx target)
5446 tree arg;
5448 if (call_expr_nargs (exp) < 3)
5449 return const0_rtx;
5450 arg = CALL_EXPR_ARG (exp, 0);
5452 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5453 /* When guessing was done, the hints should already have been stripped away. */
5454 gcc_assert (!flag_guess_branch_prob
5455 || optimize == 0 || seen_error ());
5456 return target;
5460 /* Expand a call to __builtin_assume_aligned. We just return our first
5461 argument, as the builtin_assume_aligned semantics should already have
5462 been applied by CCP. */
5464 static rtx
5465 expand_builtin_assume_aligned (tree exp, rtx target)
5467 if (call_expr_nargs (exp) < 2)
5468 return const0_rtx;
5469 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5470 EXPAND_NORMAL);
5471 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5472 && (call_expr_nargs (exp) < 3
5473 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5474 return target;
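/* Typical source-level use (a sketch):

     double *q = (double *) __builtin_assume_aligned (p, 16);

   The alignment fact itself has already been consumed by CCP; at this
   point we only evaluate and return the first argument, asserting that
   the remaining arguments are side-effect free.  */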
5477 void
5478 expand_builtin_trap (void)
5480 if (targetm.have_trap ())
5482 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5483 /* For trap insns when not accumulating outgoing args force
5484 REG_ARGS_SIZE note to prevent crossjumping of calls with
5485 different args sizes. */
5486 if (!ACCUMULATE_OUTGOING_ARGS)
5487 add_args_size_note (insn, stack_pointer_delta);
5489 else
5491 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5492 tree call_expr = build_call_expr (fn, 0);
5493 expand_call (call_expr, NULL_RTX, false);
5496 emit_barrier ();
5499 /* Expand a call to __builtin_unreachable. We do nothing except emit
5500 a barrier saying that control flow will not pass here.
5502 It is the responsibility of the program being compiled to ensure
5503 that control flow never reaches __builtin_unreachable. */
5504 static void
5505 expand_builtin_unreachable (void)
5507 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5508 to avoid this. */
5509 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5510 emit_barrier ();
5513 /* Expand EXP, a call to fabs, fabsf or fabsl.
5514 Return NULL_RTX if a normal call should be emitted rather than expanding
5515 the function inline. If convenient, the result should be placed
5516 in TARGET. SUBTARGET may be used as the target for computing
5517 the operand. */
5519 static rtx
5520 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5522 machine_mode mode;
5523 tree arg;
5524 rtx op0;
5526 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5527 return NULL_RTX;
5529 arg = CALL_EXPR_ARG (exp, 0);
5530 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5531 mode = TYPE_MODE (TREE_TYPE (arg));
5532 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5533 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5536 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5537 Return NULL if a normal call should be emitted rather than expanding the
5538 function inline. If convenient, the result should be placed in TARGET.
5539 SUBTARGET may be used as the target for computing the operand. */
5541 static rtx
5542 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5544 rtx op0, op1;
5545 tree arg;
5547 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5548 return NULL_RTX;
5550 arg = CALL_EXPR_ARG (exp, 0);
5551 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5553 arg = CALL_EXPR_ARG (exp, 1);
5554 op1 = expand_normal (arg);
5556 return expand_copysign (op0, op1, target);
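/* For example, copysign (3.0, -0.0) is -3.0: the magnitude comes from
   the first operand and the sign from the second, which is exactly what
   expand_copysign implements.  */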
5559 /* Emit a call to __builtin___clear_cache. */
5561 void
5562 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5564 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5565 BUILTIN_ASM_NAME_PTR
5566 (BUILT_IN_CLEAR_CACHE));
5568 emit_library_call (callee,
5569 LCT_NORMAL, VOIDmode,
5570 convert_memory_address (ptr_mode, begin), ptr_mode,
5571 convert_memory_address (ptr_mode, end), ptr_mode);
5574 /* Emit a call to __builtin___clear_cache, unless the target specifies
5575 it as do-nothing. This function can be used by trampoline
5576 finalizers to duplicate the effects of expanding a call to the
5577 clear_cache builtin. */
5579 void
5580 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5582 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5583 || CONST_INT_P (begin))
5584 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5585 || CONST_INT_P (end)));
5587 if (targetm.have_clear_cache ())
5589 /* We have a "clear_cache" insn, and it will handle everything. */
5590 class expand_operand ops[2];
5592 create_address_operand (&ops[0], begin);
5593 create_address_operand (&ops[1], end);
5595 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5596 return;
5598 else
5600 #ifndef CLEAR_INSN_CACHE
5601 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5602 does nothing. There is no need to call it. Do nothing. */
5603 return;
5604 #endif /* CLEAR_INSN_CACHE */
5607 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5610 /* Expand a call to __builtin___clear_cache. */
5612 static void
5613 expand_builtin___clear_cache (tree exp)
5615 tree begin, end;
5616 rtx begin_rtx, end_rtx;
5618 /* We must not expand to a library call. If we did, any
5619 fallback library function in libgcc that might contain a call to
5620 __builtin___clear_cache() would recurse infinitely. */
5621 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5623 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5624 return;
5627 begin = CALL_EXPR_ARG (exp, 0);
5628 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5630 end = CALL_EXPR_ARG (exp, 1);
5631 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5633 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
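/* Typical use (a sketch): after writing freshly generated code into BUF,

     __builtin___clear_cache ((char *) buf, (char *) buf + len);

   invalidates the instruction cache for that range on targets that
   require it, so the new code can be executed safely.  */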
5636 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5638 static rtx
5639 round_trampoline_addr (rtx tramp)
5641 rtx temp, addend, mask;
5643 /* If we don't need too much alignment, we'll have been guaranteed
5644 proper alignment by get_trampoline_type. */
5645 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5646 return tramp;
5648 /* Round address up to desired boundary. */
5649 temp = gen_reg_rtx (Pmode);
5650 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5651 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5653 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5654 temp, 0, OPTAB_LIB_WIDEN);
5655 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5656 temp, 0, OPTAB_LIB_WIDEN);
5658 return tramp;
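/* The rounding above is the usual align-up idiom; e.g. with a 128-bit
   (16-byte) TRAMPOLINE_ALIGNMENT it computes, at the RTL level,

     tramp = (tramp + 15) & -16;  */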
5661 static rtx
5662 expand_builtin_init_trampoline (tree exp, bool onstack)
5664 tree t_tramp, t_func, t_chain;
5665 rtx m_tramp, r_tramp, r_chain, tmp;
5667 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5668 POINTER_TYPE, VOID_TYPE))
5669 return NULL_RTX;
5671 t_tramp = CALL_EXPR_ARG (exp, 0);
5672 t_func = CALL_EXPR_ARG (exp, 1);
5673 t_chain = CALL_EXPR_ARG (exp, 2);
5675 r_tramp = expand_normal (t_tramp);
5676 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5677 MEM_NOTRAP_P (m_tramp) = 1;
5679 /* If ONSTACK, the TRAMP argument should be the address of a field
5680 within the local function's FRAME decl. Either way, let's see if
5681 we can fill in the MEM_ATTRs for this memory. */
5682 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5683 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5685 /* Creator of a heap trampoline is responsible for making sure the
5686 address is aligned to at least STACK_BOUNDARY. Normally malloc
5687 will ensure this anyhow. */
5688 tmp = round_trampoline_addr (r_tramp);
5689 if (tmp != r_tramp)
5691 m_tramp = change_address (m_tramp, BLKmode, tmp);
5692 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5693 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5696 /* The FUNC argument should be the address of the nested function.
5697 Extract the actual function decl to pass to the hook. */
5698 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5699 t_func = TREE_OPERAND (t_func, 0);
5700 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5702 r_chain = expand_normal (t_chain);
5704 /* Generate insns to initialize the trampoline. */
5705 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5707 if (onstack)
5709 trampolines_created = 1;
5711 if (targetm.calls.custom_function_descriptors != 0)
5712 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5713 "trampoline generated for nested function %qD", t_func);
5716 return const0_rtx;
5719 static rtx
5720 expand_builtin_adjust_trampoline (tree exp)
5722 rtx tramp;
5724 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5725 return NULL_RTX;
5727 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5728 tramp = round_trampoline_addr (tramp);
5729 if (targetm.calls.trampoline_adjust_address)
5730 tramp = targetm.calls.trampoline_adjust_address (tramp);
5732 return tramp;
5735 /* Expand a call to the builtin descriptor initialization routine.
5736 A descriptor is made up of a couple of pointers to the static
5737 chain and the code entry in this order. */
5739 static rtx
5740 expand_builtin_init_descriptor (tree exp)
5742 tree t_descr, t_func, t_chain;
5743 rtx m_descr, r_descr, r_func, r_chain;
5745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5746 VOID_TYPE))
5747 return NULL_RTX;
5749 t_descr = CALL_EXPR_ARG (exp, 0);
5750 t_func = CALL_EXPR_ARG (exp, 1);
5751 t_chain = CALL_EXPR_ARG (exp, 2);
5753 r_descr = expand_normal (t_descr);
5754 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5755 MEM_NOTRAP_P (m_descr) = 1;
5756 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5758 r_func = expand_normal (t_func);
5759 r_chain = expand_normal (t_chain);
5761 /* Generate insns to initialize the descriptor. */
5762 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5763 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5764 POINTER_SIZE / BITS_PER_UNIT), r_func);
5766 return const0_rtx;
5769 /* Expand a call to the builtin descriptor adjustment routine. */
5771 static rtx
5772 expand_builtin_adjust_descriptor (tree exp)
5774 rtx tramp;
5776 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5777 return NULL_RTX;
5779 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5781 /* Unalign the descriptor to allow runtime identification. */
5782 tramp = plus_constant (ptr_mode, tramp,
5783 targetm.calls.custom_function_descriptors);
5785 return force_operand (tramp, NULL_RTX);
5788 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5789 function. The function first checks whether the back end provides
5790 an insn to implement signbit for the respective mode. If not, it
5791 checks whether the floating point format of the value is such that
5792 the sign bit can be extracted. If that is not the case, error out.
5793 EXP is the expression that is a call to the builtin function; if
5794 convenient, the result should be placed in TARGET. */
5795 static rtx
5796 expand_builtin_signbit (tree exp, rtx target)
5798 const struct real_format *fmt;
5799 scalar_float_mode fmode;
5800 scalar_int_mode rmode, imode;
5801 tree arg;
5802 int word, bitpos;
5803 enum insn_code icode;
5804 rtx temp;
5805 location_t loc = EXPR_LOCATION (exp);
5807 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5808 return NULL_RTX;
5810 arg = CALL_EXPR_ARG (exp, 0);
5811 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5812 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5813 fmt = REAL_MODE_FORMAT (fmode);
5815 arg = builtin_save_expr (arg);
5817 /* Expand the argument yielding a RTX expression. */
5818 temp = expand_normal (arg);
5820 /* Check if the back end provides an insn that handles signbit for the
5821 argument's mode. */
5822 icode = optab_handler (signbit_optab, fmode);
5823 if (icode != CODE_FOR_nothing)
5825 rtx_insn *last = get_last_insn ();
5826 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5827 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
5828 return this_target;
5829 delete_insns_since (last);
5832 /* For floating point formats without a sign bit, implement signbit
5833 as "ARG < 0.0". */
5834 bitpos = fmt->signbit_ro;
5835 if (bitpos < 0)
5837 /* But we can't do this if the format supports signed zero. */
5838 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5840 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5841 build_real (TREE_TYPE (arg), dconst0));
5842 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5845 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5847 imode = int_mode_for_mode (fmode).require ();
5848 temp = gen_lowpart (imode, temp);
5850 else
5852 imode = word_mode;
5853 /* Handle targets with different FP word orders. */
5854 if (FLOAT_WORDS_BIG_ENDIAN)
5855 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5856 else
5857 word = bitpos / BITS_PER_WORD;
5858 temp = operand_subword_force (temp, word, fmode);
5859 bitpos = bitpos % BITS_PER_WORD;
5862 /* Force the intermediate word_mode (or narrower) result into a
5863 register. This avoids attempting to create paradoxical SUBREGs
5864 of floating point modes below. */
5865 temp = force_reg (imode, temp);
5867 /* If the bitpos is within the "result mode" lowpart, the operation
5868 can be implemented with a single bitwise AND. Otherwise, we need
5869 a right shift and an AND. */
5871 if (bitpos < GET_MODE_BITSIZE (rmode))
5873 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5875 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5876 temp = gen_lowpart (rmode, temp);
5877 temp = expand_binop (rmode, and_optab, temp,
5878 immed_wide_int_const (mask, rmode),
5879 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5881 else
5883 /* Perform a logical right shift to place the signbit in the least
5884 significant bit, then truncate the result to the desired mode
5885 and mask just this bit. */
5886 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5887 temp = gen_lowpart (rmode, temp);
5888 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5889 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5892 return temp;
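/* Concrete sketch of the two tails above: for IEEE double with a 32-bit
   result mode, signbit_ro is 63, so the bit is brought down with a
   logical shift and masked, roughly

     result = (unsigned) (bits >> 63) & 1;

   whereas for IEEE single the bit position (31) already lies within the
   result mode and a single AND with the bit-31 mask suffices, any
   nonzero value being a valid "true".  */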
5895 /* Expand fork or exec calls. TARGET is the desired target of the
5896 call. EXP is the call. FN is the
5897 identifier of the actual function. IGNORE is nonzero if the
5898 value is to be ignored. */
5900 static rtx
5901 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5903 tree id, decl;
5904 tree call;
5906 /* If we are not profiling, just call the function. */
5907 if (!profile_arc_flag)
5908 return NULL_RTX;
5910 /* Otherwise call the wrapper. This should be equivalent for the rest of
5911 compiler, so the code does not diverge, and the wrapper may run the
5912 code necessary for keeping the profiling sane. */
5914 switch (DECL_FUNCTION_CODE (fn))
5916 case BUILT_IN_FORK:
5917 id = get_identifier ("__gcov_fork");
5918 break;
5920 case BUILT_IN_EXECL:
5921 id = get_identifier ("__gcov_execl");
5922 break;
5924 case BUILT_IN_EXECV:
5925 id = get_identifier ("__gcov_execv");
5926 break;
5928 case BUILT_IN_EXECLP:
5929 id = get_identifier ("__gcov_execlp");
5930 break;
5932 case BUILT_IN_EXECLE:
5933 id = get_identifier ("__gcov_execle");
5934 break;
5936 case BUILT_IN_EXECVP:
5937 id = get_identifier ("__gcov_execvp");
5938 break;
5940 case BUILT_IN_EXECVE:
5941 id = get_identifier ("__gcov_execve");
5942 break;
5944 default:
5945 gcc_unreachable ();
5948 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5949 FUNCTION_DECL, id, TREE_TYPE (fn));
5950 DECL_EXTERNAL (decl) = 1;
5951 TREE_PUBLIC (decl) = 1;
5952 DECL_ARTIFICIAL (decl) = 1;
5953 TREE_NOTHROW (decl) = 1;
5954 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5955 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5956 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5957 return expand_call (call, target, ignore);
5962 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5963 the pointer in these functions is void*, the tree optimizers may remove
5964 casts. The mode computed in expand_builtin isn't reliable either, due
5965 to __sync_bool_compare_and_swap.
5967 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5968 group of builtins. This gives us log2 of the mode size. */
5970 static inline machine_mode
5971 get_builtin_sync_mode (int fcode_diff)
5973 /* The size is not negotiable, so ask not to get BLKmode in return
5974 if the target indicates that a smaller size would be better. */
5975 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
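/* For example, __sync_fetch_and_add_4 sits two entries after
   __sync_fetch_and_add_1, so FCODE_DIFF is 2 and the requested size is
   8 << 2 = 32 bits, i.e. SImode on typical targets.  */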
5978 /* Expand the memory expression LOC and return the appropriate memory operand
5979 for the builtin_sync operations. */
5981 static rtx
5982 get_builtin_sync_mem (tree loc, machine_mode mode)
5984 rtx addr, mem;
5985 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5986 ? TREE_TYPE (TREE_TYPE (loc))
5987 : TREE_TYPE (loc));
5988 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5990 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5991 addr = convert_memory_address (addr_mode, addr);
5993 /* Note that we explicitly do not want any alias information for this
5994 memory, so that we kill all other live memories. Otherwise we don't
5995 satisfy the full barrier semantics of the intrinsic. */
5996 mem = gen_rtx_MEM (mode, addr);
5998 set_mem_addr_space (mem, addr_space);
6000 mem = validize_mem (mem);
6002 /* The alignment needs to be at least that of the mode. */
6003 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6004 get_pointer_alignment (loc)));
6005 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6006 MEM_VOLATILE_P (mem) = 1;
6008 return mem;
6011 /* Make sure an argument is in the right mode.
6012 EXP is the tree argument.
6013 MODE is the mode it should be in. */
6015 static rtx
6016 expand_expr_force_mode (tree exp, machine_mode mode)
6018 rtx val;
6019 machine_mode old_mode;
6021 if (TREE_CODE (exp) == SSA_NAME
6022 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6024 /* Undo argument promotion if possible, as combine might not
6025 be able to do it later due to MEM_VOLATILE_P uses in the
6026 patterns. */
6027 gimple *g = get_gimple_for_ssa_name (exp);
6028 if (g && gimple_assign_cast_p (g))
6030 tree rhs = gimple_assign_rhs1 (g);
6031 tree_code code = gimple_assign_rhs_code (g);
6032 if (CONVERT_EXPR_CODE_P (code)
6033 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6034 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6035 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6036 && (TYPE_PRECISION (TREE_TYPE (exp))
6037 > TYPE_PRECISION (TREE_TYPE (rhs))))
6038 exp = rhs;
6042 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6043 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6044 of CONST_INTs, where we know the old_mode only from the call argument. */
6046 old_mode = GET_MODE (val);
6047 if (old_mode == VOIDmode)
6048 old_mode = TYPE_MODE (TREE_TYPE (exp));
6049 val = convert_modes (mode, old_mode, val, 1);
6050 return val;
6054 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6055 EXP is the CALL_EXPR. CODE is the rtx code
6056 that corresponds to the arithmetic or logical operation from the name;
6057 an exception here is that NOT actually means NAND. TARGET is an optional
6058 place for us to store the results; AFTER is true if this is the
6059 fetch_and_xxx form. */
6061 static rtx
6062 expand_builtin_sync_operation (machine_mode mode, tree exp,
6063 enum rtx_code code, bool after,
6064 rtx target)
6066 rtx val, mem;
6067 location_t loc = EXPR_LOCATION (exp);
6069 if (code == NOT && warn_sync_nand)
6071 tree fndecl = get_callee_fndecl (exp);
6072 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6074 static bool warned_f_a_n, warned_n_a_f;
6076 switch (fcode)
6078 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6079 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6080 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6081 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6082 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6083 if (warned_f_a_n)
6084 break;
6086 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6087 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6088 warned_f_a_n = true;
6089 break;
6091 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6092 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6093 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6094 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6095 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6096 if (warned_n_a_f)
6097 break;
6099 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6100 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6101 warned_n_a_f = true;
6102 break;
6104 default:
6105 gcc_unreachable ();
6109 /* Expand the operands. */
6110 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6111 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6113 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6114 after);
6117 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6118 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6119 true if this is the boolean form. TARGET is a place for us to store the
6120 results; this is NOT optional if IS_BOOL is true. */
6122 static rtx
6123 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6124 bool is_bool, rtx target)
6126 rtx old_val, new_val, mem;
6127 rtx *pbool, *poval;
6129 /* Expand the operands. */
6130 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6131 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6132 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6134 pbool = poval = NULL;
6135 if (target != const0_rtx)
6137 if (is_bool)
6138 pbool = &target;
6139 else
6140 poval = &target;
6142 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6143 false, MEMMODEL_SYNC_SEQ_CST,
6144 MEMMODEL_SYNC_SEQ_CST))
6145 return NULL_RTX;
6147 return target;
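/* Source-level sketch of the two forms handled above:

     old = __sync_val_compare_and_swap (&x, expected, desired);
     ok  = __sync_bool_compare_and_swap (&x, expected, desired);

   The val form returns the prior contents of the object, the bool form
   whether the swap happened; both funnel into the same RTL helper with
   either POVAL or PBOOL set.  */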
6150 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6151 general form is actually an atomic exchange, and some targets only
6152 support a reduced form with the second argument being a constant 1.
6153 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6154 the results. */
6156 static rtx
6157 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6158 rtx target)
6160 rtx val, mem;
6162 /* Expand the operands. */
6163 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6164 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6166 return expand_sync_lock_test_and_set (target, mem, val);
6169 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6171 static void
6172 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6174 rtx mem;
6176 /* Expand the operands. */
6177 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6179 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6182 /* Given an integer representing an ``enum memmodel'', verify its
6183 correctness and return the memory model enum. */
6185 static enum memmodel
6186 get_memmodel (tree exp)
6188 /* If the parameter is not a constant, it's a run time value so we'll just
6189 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6190 if (TREE_CODE (exp) != INTEGER_CST)
6191 return MEMMODEL_SEQ_CST;
6193 rtx op = expand_normal (exp);
6195 unsigned HOST_WIDE_INT val = INTVAL (op);
6196 if (targetm.memmodel_check)
6197 val = targetm.memmodel_check (val);
6198 else if (val & ~MEMMODEL_MASK)
6199 return MEMMODEL_SEQ_CST;
6201 /* We should never see a user-specified SYNC memory model, so >= LAST works. */
6202 if (memmodel_base (val) >= MEMMODEL_LAST)
6203 return MEMMODEL_SEQ_CST;
6205 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6206 be conservative and promote consume to acquire. */
6207 if (val == MEMMODEL_CONSUME)
6208 val = MEMMODEL_ACQUIRE;
6210 return (enum memmodel) val;
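/* The constant being decoded here is the __ATOMIC_* value from the
   source level; with the usual numbering __ATOMIC_RELAXED is 0,
   __ATOMIC_CONSUME 1, __ATOMIC_ACQUIRE 2, __ATOMIC_RELEASE 3,
   __ATOMIC_ACQ_REL 4 and __ATOMIC_SEQ_CST 5.  Anything outside the
   recognized range is conservatively treated as seq_cst above.  */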
6213 /* Expand the __atomic_exchange intrinsic:
6214 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6215 EXP is the CALL_EXPR.
6216 TARGET is an optional place for us to store the results. */
6218 static rtx
6219 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6221 rtx val, mem;
6222 enum memmodel model;
6224 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6226 if (!flag_inline_atomics)
6227 return NULL_RTX;
6229 /* Expand the operands. */
6230 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6231 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6233 return expand_atomic_exchange (target, mem, val, model);
6236 /* Expand the __atomic_compare_exchange intrinsic:
6237 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6238 TYPE desired, BOOL weak,
6239 enum memmodel success,
6240 enum memmodel failure)
6241 EXP is the CALL_EXPR.
6242 TARGET is an optional place for us to store the results. */
6244 static rtx
6245 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6246 rtx target)
6248 rtx expect, desired, mem, oldval;
6249 rtx_code_label *label;
6250 tree weak;
6251 bool is_weak;
6253 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6254 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6256 if (failure > success)
6257 success = MEMMODEL_SEQ_CST;
6259 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6261 failure = MEMMODEL_SEQ_CST;
6262 success = MEMMODEL_SEQ_CST;
6266 if (!flag_inline_atomics)
6267 return NULL_RTX;
6269 /* Expand the operands. */
6270 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6272 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6273 expect = convert_memory_address (Pmode, expect);
6274 expect = gen_rtx_MEM (mode, expect);
6275 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6277 weak = CALL_EXPR_ARG (exp, 3);
6278 is_weak = false;
6279 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6280 is_weak = true;
6282 if (target == const0_rtx)
6283 target = NULL;
6285 /* Lest the rtl backend create a race condition with an improper store
6286 to memory, always create a new pseudo for OLDVAL. */
6287 oldval = NULL;
6289 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6290 is_weak, success, failure))
6291 return NULL_RTX;
6293 /* Conditionally store back to EXPECT, lest we create a race condition
6294 with an improper store to memory. */
6295 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6296 the normal case where EXPECT is totally private, i.e. a register. At
6297 which point the store can be unconditional. */
6298 label = gen_label_rtx ();
6299 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6300 GET_MODE (target), 1, label);
6301 emit_move_insn (expect, oldval);
6302 emit_label (label);
6304 return target;
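/* A sketch of the source form being expanded (memory models chosen for
   illustration only):

     bool ok = __atomic_compare_exchange_n (&x, &expected, desired, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED);

   On failure the current value of the object is written back into
   'expected', which is why the conditional store to EXPECT above is
   needed.  */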
6307 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6308 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6309 call. The weak parameter must be dropped to match the expected parameter
6310 list and the expected argument changed from value to pointer to memory
6311 slot. */
6313 static void
6314 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6316 unsigned int z;
6317 vec<tree, va_gc> *vec;
6319 vec_alloc (vec, 5);
6320 vec->quick_push (gimple_call_arg (call, 0));
6321 tree expected = gimple_call_arg (call, 1);
6322 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6323 TREE_TYPE (expected));
6324 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6325 if (expd != x)
6326 emit_move_insn (x, expd);
6327 tree v = make_tree (TREE_TYPE (expected), x);
6328 vec->quick_push (build1 (ADDR_EXPR,
6329 build_pointer_type (TREE_TYPE (expected)), v));
6330 vec->quick_push (gimple_call_arg (call, 2));
6331 /* Skip the boolean weak parameter. */
6332 for (z = 4; z < 6; z++)
6333 vec->quick_push (gimple_call_arg (call, z));
6334 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6335 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6336 gcc_assert (bytes_log2 < 5);
6337 built_in_function fncode
6338 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6339 + bytes_log2);
6340 tree fndecl = builtin_decl_explicit (fncode);
6341 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6342 fndecl);
6343 tree exp = build_call_vec (boolean_type_node, fn, vec);
6344 tree lhs = gimple_call_lhs (call);
6345 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6346 if (lhs)
6348 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6349 if (GET_MODE (boolret) != mode)
6350 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6351 x = force_reg (mode, x);
6352 write_complex_part (target, boolret, true, true);
6353 write_complex_part (target, x, false, false);
6357 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6359 void
6360 expand_ifn_atomic_compare_exchange (gcall *call)
6362 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6363 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6364 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6366 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6367 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6369 if (failure > success)
6370 success = MEMMODEL_SEQ_CST;
6372 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6374 failure = MEMMODEL_SEQ_CST;
6375 success = MEMMODEL_SEQ_CST;
6378 if (!flag_inline_atomics)
6380 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6381 return;
6384 /* Expand the operands. */
6385 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6387 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6388 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6390 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6392 rtx boolret = NULL;
6393 rtx oldval = NULL;
6395 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6396 is_weak, success, failure))
6398 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6399 return;
6402 tree lhs = gimple_call_lhs (call);
6403 if (lhs)
6405 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6406 if (GET_MODE (boolret) != mode)
6407 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6408 write_complex_part (target, boolret, true, true);
6409 write_complex_part (target, oldval, false, false);
6413 /* Expand the __atomic_load intrinsic:
6414 TYPE __atomic_load (TYPE *object, enum memmodel)
6415 EXP is the CALL_EXPR.
6416 TARGET is an optional place for us to store the results. */
6418 static rtx
6419 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6421 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6422 if (is_mm_release (model) || is_mm_acq_rel (model))
6423 model = MEMMODEL_SEQ_CST;
6425 if (!flag_inline_atomics)
6426 return NULL_RTX;
6428 /* Expand the operand. */
6429 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6431 return expand_atomic_load (target, mem, model);
6435 /* Expand the __atomic_store intrinsic:
6436 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6437 EXP is the CALL_EXPR.
6438 TARGET is an optional place for us to store the results. */
6440 static rtx
6441 expand_builtin_atomic_store (machine_mode mode, tree exp)
6443 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6444 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6445 || is_mm_release (model)))
6446 model = MEMMODEL_SEQ_CST;
6448 if (!flag_inline_atomics)
6449 return NULL_RTX;
6451 /* Expand the operands. */
6452 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6453 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6455 return expand_atomic_store (mem, val, model, false);
6458 /* Expand the __atomic_fetch_XXX intrinsic:
6459 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6460 EXP is the CALL_EXPR.
6461 TARGET is an optional place for us to store the results.
6462 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6463 FETCH_AFTER is true if returning the result of the operation.
6464 FETCH_AFTER is false if returning the value before the operation.
6465 IGNORE is true if the result is not used.
6466 EXT_CALL is the correct builtin for an external call if this cannot be
6467 resolved to an instruction sequence. */
6469 static rtx
6470 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6471 enum rtx_code code, bool fetch_after,
6472 bool ignore, enum built_in_function ext_call)
6474 rtx val, mem, ret;
6475 enum memmodel model;
6476 tree fndecl;
6477 tree addr;
6479 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6481 /* Expand the operands. */
6482 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6483 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6485 /* Only try generating instructions if inlining is turned on. */
6486 if (flag_inline_atomics)
6488 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6489 if (ret)
6490 return ret;
6493 /* Return if a different routine isn't needed for the library call. */
6494 if (ext_call == BUILT_IN_NONE)
6495 return NULL_RTX;
6497 /* Change the call to the specified function. */
6498 fndecl = get_callee_fndecl (exp);
6499 addr = CALL_EXPR_FN (exp);
6500 STRIP_NOPS (addr);
6502 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6503 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6505 /* If we will emit code after the call, the call cannot be a tail call.
6506 If it is emitted as a tail call, a barrier is emitted after it, and
6507 then all trailing code is removed. */
6508 if (!ignore)
6509 CALL_EXPR_TAILCALL (exp) = 0;
6511 /* Expand the call here so we can emit trailing code. */
6512 ret = expand_call (exp, target, ignore);
6514 /* Replace the original function just in case it matters. */
6515 TREE_OPERAND (addr, 0) = fndecl;
6517 /* Then issue the arithmetic correction to return the right result. */
6518 if (!ignore)
6520 if (code == NOT)
6522 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6523 OPTAB_LIB_WIDEN);
6524 ret = expand_simple_unop (mode, NOT, ret, target, true);
6526 else
6527 ret = expand_simple_binop (mode, code, ret, val, target, true,
6528 OPTAB_LIB_WIDEN);
6530 return ret;
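/* The arithmetic correction above turns the library's fetch-and-op
   result (the old value) into the op-and-fetch result.  For NAND this
   amounts to

     new = ~(old & val);

   i.e. an AND followed by a NOT, matching the GCC 4.4-and-later NAND
   semantics mentioned earlier for the __sync builtins.  */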
6533 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6535 void
6536 expand_ifn_atomic_bit_test_and (gcall *call)
6538 tree ptr = gimple_call_arg (call, 0);
6539 tree bit = gimple_call_arg (call, 1);
6540 tree flag = gimple_call_arg (call, 2);
6541 tree lhs = gimple_call_lhs (call);
6542 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6543 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6544 enum rtx_code code;
6545 optab optab;
6546 class expand_operand ops[5];
6548 gcc_assert (flag_inline_atomics);
6550 if (gimple_call_num_args (call) == 5)
6551 model = get_memmodel (gimple_call_arg (call, 3));
6553 rtx mem = get_builtin_sync_mem (ptr, mode);
6554 rtx val = expand_expr_force_mode (bit, mode);
6556 switch (gimple_call_internal_fn (call))
6558 case IFN_ATOMIC_BIT_TEST_AND_SET:
6559 code = IOR;
6560 optab = atomic_bit_test_and_set_optab;
6561 break;
6562 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6563 code = XOR;
6564 optab = atomic_bit_test_and_complement_optab;
6565 break;
6566 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6567 code = AND;
6568 optab = atomic_bit_test_and_reset_optab;
6569 break;
6570 default:
6571 gcc_unreachable ();
6574 if (lhs == NULL_TREE)
6576 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6577 val, NULL_RTX, true, OPTAB_DIRECT);
6578 if (code == AND)
6579 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6580 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6581 return;
6584 rtx target;
6585 if (lhs)
6586 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6587 else
6588 target = gen_reg_rtx (mode);
6589 enum insn_code icode = direct_optab_handler (optab, mode);
6590 gcc_assert (icode != CODE_FOR_nothing);
6591 create_output_operand (&ops[0], target, mode);
6592 create_fixed_operand (&ops[1], mem);
6593 create_convert_operand_to (&ops[2], val, mode, true);
6594 create_integer_operand (&ops[3], model);
6595 create_integer_operand (&ops[4], integer_onep (flag));
6596 if (maybe_expand_insn (icode, 5, ops))
6597 return;
6599 rtx bitval = val;
6600 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6601 val, NULL_RTX, true, OPTAB_DIRECT);
6602 rtx maskval = val;
6603 if (code == AND)
6604 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6605 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6606 code, model, false);
6607 if (!result)
6609 bool is_atomic = gimple_call_num_args (call) == 5;
6610 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6611 tree fndecl = gimple_call_addr_fndecl (tcall);
6612 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6613 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6614 make_tree (type, val),
6615 is_atomic
6616 ? gimple_call_arg (call, 3)
6617 : integer_zero_node);
6618 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6619 mode, !lhs);
6621 if (!lhs)
6622 return;
6623 if (integer_onep (flag))
6625 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6626 NULL_RTX, true, OPTAB_DIRECT);
6627 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6628 true, OPTAB_DIRECT);
6630 else
6631 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6632 OPTAB_DIRECT);
6633 if (result != target)
6634 emit_move_insn (target, result);
6637 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6639 void
6640 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6642 tree cmp = gimple_call_arg (call, 0);
6643 tree ptr = gimple_call_arg (call, 1);
6644 tree arg = gimple_call_arg (call, 2);
6645 tree lhs = gimple_call_lhs (call);
6646 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6647 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6648 optab optab;
6649 rtx_code code;
6650 class expand_operand ops[5];
6652 gcc_assert (flag_inline_atomics);
6654 if (gimple_call_num_args (call) == 5)
6655 model = get_memmodel (gimple_call_arg (call, 3));
6657 rtx mem = get_builtin_sync_mem (ptr, mode);
6658 rtx op = expand_expr_force_mode (arg, mode);
6660 switch (gimple_call_internal_fn (call))
6662 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6663 code = PLUS;
6664 optab = atomic_add_fetch_cmp_0_optab;
6665 break;
6666 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6667 code = MINUS;
6668 optab = atomic_sub_fetch_cmp_0_optab;
6669 break;
6670 case IFN_ATOMIC_AND_FETCH_CMP_0:
6671 code = AND;
6672 optab = atomic_and_fetch_cmp_0_optab;
6673 break;
6674 case IFN_ATOMIC_OR_FETCH_CMP_0:
6675 code = IOR;
6676 optab = atomic_or_fetch_cmp_0_optab;
6677 break;
6678 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6679 code = XOR;
6680 optab = atomic_xor_fetch_cmp_0_optab;
6681 break;
6682 default:
6683 gcc_unreachable ();
6686 enum rtx_code comp = UNKNOWN;
6687 switch (tree_to_uhwi (cmp))
6689 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6690 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6691 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6692 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6693 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6694 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6695 default: gcc_unreachable ();
6698 rtx target;
6699 if (lhs == NULL_TREE)
6700 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6701 else
6702 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6703 enum insn_code icode = direct_optab_handler (optab, mode);
6704 gcc_assert (icode != CODE_FOR_nothing);
6705 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6706 create_fixed_operand (&ops[1], mem);
6707 create_convert_operand_to (&ops[2], op, mode, true);
6708 create_integer_operand (&ops[3], model);
6709 create_integer_operand (&ops[4], comp);
6710 if (maybe_expand_insn (icode, 5, ops))
6711 return;
6713 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6714 code, model, true);
6715 if (!result)
6717 bool is_atomic = gimple_call_num_args (call) == 5;
6718 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6719 tree fndecl = gimple_call_addr_fndecl (tcall);
6720 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6721 tree exp = build_call_nary (type, tcall,
6722 2 + is_atomic, ptr, arg,
6723 is_atomic
6724 ? gimple_call_arg (call, 3)
6725 : integer_zero_node);
6726 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6727 mode, !lhs);
6730 if (lhs)
6732 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6733 0, 1);
6734 if (result != target)
6735 emit_move_insn (target, result);
6739 /* Expand an atomic clear operation.
6740 void __atomic_clear (BOOL *obj, enum memmodel)
6741 EXP is the call expression. */
6743 static rtx
6744 expand_builtin_atomic_clear (tree exp)
6746 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6747 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6748 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6750 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6751 model = MEMMODEL_SEQ_CST;
6753 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6754 Failing that, a store is issued by __atomic_store. The only way this can
6755 fail is if the bool type is larger than a word size. Unlikely, but
6756 handle it anyway for completeness. Assume a single threaded model since
6757 there is no atomic support in this case, and no barriers are required. */
6758 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6759 if (!ret)
6760 emit_move_insn (mem, const0_rtx);
6761 return const0_rtx;
6764 /* Expand an atomic test_and_set operation.
6765 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6766 EXP is the call expression. */
6768 static rtx
6769 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6771 rtx mem;
6772 enum memmodel model;
6773 machine_mode mode;
6775 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6776 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6777 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6779 return expand_atomic_test_and_set (target, mem, model);
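/* Together with __atomic_clear above, this is enough for a minimal
   spinlock sketch:

     static bool lock;
     while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&lock, __ATOMIC_RELEASE);  */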
6783 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6784 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6786 static tree
6787 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6789 int size;
6790 machine_mode mode;
6791 unsigned int mode_align, type_align;
6793 if (TREE_CODE (arg0) != INTEGER_CST)
6794 return NULL_TREE;
6796 /* We need a corresponding integer mode for the access to be lock-free. */
6797 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6798 if (!int_mode_for_size (size, 0).exists (&mode))
6799 return boolean_false_node;
6801 mode_align = GET_MODE_ALIGNMENT (mode);
6803 if (TREE_CODE (arg1) == INTEGER_CST)
6805 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6807 /* Either this argument is null, or it's a fake pointer encoding
6808 the alignment of the object. */
6809 val = least_bit_hwi (val);
6810 val *= BITS_PER_UNIT;
6812 if (val == 0 || mode_align < val)
6813 type_align = mode_align;
6814 else
6815 type_align = val;
6817 else
6819 tree ttype = TREE_TYPE (arg1);
6821 /* This function is usually invoked and folded immediately by the front
6822 end before anything else has a chance to look at it. The pointer
6823 parameter at this point is usually cast to a void *, so check for that
6824 and look past the cast. */
6825 if (CONVERT_EXPR_P (arg1)
6826 && POINTER_TYPE_P (ttype)
6827 && VOID_TYPE_P (TREE_TYPE (ttype))
6828 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6829 arg1 = TREE_OPERAND (arg1, 0);
6831 ttype = TREE_TYPE (arg1);
6832 gcc_assert (POINTER_TYPE_P (ttype));
6834 /* Get the underlying type of the object. */
6835 ttype = TREE_TYPE (ttype);
6836 type_align = TYPE_ALIGN (ttype);
6839 /* If the object has smaller alignment, the lock free routines cannot
6840 be used. */
6841 if (type_align < mode_align)
6842 return boolean_false_node;
6844 /* Check if a compare_and_swap pattern exists for the mode which represents
6845 the required size. The pattern is not allowed to fail, so the existence
6846 of the pattern indicates support is present. Also require that an
6847 atomic load exists for the required size. */
6848 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6849 return boolean_true_node;
6850 else
6851 return boolean_false_node;
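/* For example, __atomic_always_lock_free (sizeof (int), 0) typically
   folds to true on targets providing a 32-bit compare-and-swap and
   atomic load, while passing a pointer to an under-aligned object as
   the second argument makes it fold to false.  */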
6854 /* Return true if the parameters to call EXP represent an object which will
6855 always generate lock free instructions. The first argument represents the
6856 size of the object, and the second parameter is a pointer to the object
6857 itself. If NULL is passed for the object, then the result is based on
6858 typical alignment for an object of the specified size. Otherwise return
6859 false. */
6861 static rtx
6862 expand_builtin_atomic_always_lock_free (tree exp)
6864 tree size;
6865 tree arg0 = CALL_EXPR_ARG (exp, 0);
6866 tree arg1 = CALL_EXPR_ARG (exp, 1);
6868 if (TREE_CODE (arg0) != INTEGER_CST)
6870 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6871 return const0_rtx;
6874 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6875 if (size == boolean_true_node)
6876 return const1_rtx;
6877 return const0_rtx;
6880 /* Return one or zero if it can be determined that the object at ARG1 of size ARG0
6881 is lock free on this architecture. */
6883 static tree
6884 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6886 if (!flag_inline_atomics)
6887 return NULL_TREE;
6889 /* If it isn't always lock free, don't generate a result. */
6890 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6891 return boolean_true_node;
6893 return NULL_TREE;
6896 /* Return true if the parameters to call EXP represent an object which will
6897 always generate lock free instructions. The first argument represents the
6898 size of the object, and the second parameter is a pointer to the object
6899 itself. If NULL is passed for the object, then the result is based on
6900 typical alignment for an object of the specified size. Otherwise return
6901 NULL. */
6903 static rtx
6904 expand_builtin_atomic_is_lock_free (tree exp)
6906 tree size;
6907 tree arg0 = CALL_EXPR_ARG (exp, 0);
6908 tree arg1 = CALL_EXPR_ARG (exp, 1);
6910 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6912 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6913 return NULL_RTX;
6916 if (!flag_inline_atomics)
6917 return NULL_RTX;
6919 /* If the value is known at compile time, return the RTX for it. */
6920 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6921 if (size == boolean_true_node)
6922 return const1_rtx;
6924 return NULL_RTX;
6927 /* Expand the __atomic_thread_fence intrinsic:
6928 void __atomic_thread_fence (enum memmodel)
6929 EXP is the CALL_EXPR. */
6931 static void
6932 expand_builtin_atomic_thread_fence (tree exp)
6934 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6935 expand_mem_thread_fence (model);
6938 /* Expand the __atomic_signal_fence intrinsic:
6939 void __atomic_signal_fence (enum memmodel)
6940 EXP is the CALL_EXPR. */
6942 static void
6943 expand_builtin_atomic_signal_fence (tree exp)
6945 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6946 expand_mem_signal_fence (model);
6949 /* Expand the __sync_synchronize intrinsic. */
6951 static void
6952 expand_builtin_sync_synchronize (void)
6954 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6957 static rtx
6958 expand_builtin_thread_pointer (tree exp, rtx target)
6960 enum insn_code icode;
6961 if (!validate_arglist (exp, VOID_TYPE))
6962 return const0_rtx;
6963 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6964 if (icode != CODE_FOR_nothing)
6966 class expand_operand op;
6967 /* If the target is not suitable then create a new one. */
6968 if (target == NULL_RTX
6969 || !REG_P (target)
6970 || GET_MODE (target) != Pmode)
6971 target = gen_reg_rtx (Pmode);
6972 create_output_operand (&op, target, Pmode);
6973 expand_insn (icode, 1, &op);
6974 return target;
6976 error ("%<__builtin_thread_pointer%> is not supported on this target");
6977 return const0_rtx;
6980 static void
6981 expand_builtin_set_thread_pointer (tree exp)
6983 enum insn_code icode;
6984 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6985 return;
6986 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6987 if (icode != CODE_FOR_nothing)
6989 class expand_operand op;
6990 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6991 Pmode, EXPAND_NORMAL);
6992 create_input_operand (&op, val, Pmode);
6993 expand_insn (icode, 1, &op);
6994 return;
6996 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7000 /* Emit code to restore the current value of the stack. */
7002 static void
7003 expand_stack_restore (tree var)
7005 rtx_insn *prev;
7006 rtx sa = expand_normal (var);
7008 sa = convert_memory_address (Pmode, sa);
7010 prev = get_last_insn ();
7011 emit_stack_restore (SAVE_BLOCK, sa);
7013 record_new_stack_level ();
7015 fixup_args_size_notes (prev, get_last_insn (), 0);
7018 /* Emit code to save the current value of the stack. */
7020 static rtx
7021 expand_stack_save (void)
7023 rtx ret = NULL_RTX;
7025 emit_stack_save (SAVE_BLOCK, &ret);
7026 return ret;
7029 /* Emit code to get the openacc gang, worker or vector id or size. */
7031 static rtx
7032 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7034 const char *name;
7035 rtx fallback_retval;
7036 rtx_insn *(*gen_fn) (rtx, rtx);
7037 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7039 case BUILT_IN_GOACC_PARLEVEL_ID:
7040 name = "__builtin_goacc_parlevel_id";
7041 fallback_retval = const0_rtx;
7042 gen_fn = targetm.gen_oacc_dim_pos;
7043 break;
7044 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7045 name = "__builtin_goacc_parlevel_size";
7046 fallback_retval = const1_rtx;
7047 gen_fn = targetm.gen_oacc_dim_size;
7048 break;
7049 default:
7050 gcc_unreachable ();
7053 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7055 error ("%qs only supported in OpenACC code", name);
7056 return const0_rtx;
7059 tree arg = CALL_EXPR_ARG (exp, 0);
7060 if (TREE_CODE (arg) != INTEGER_CST)
7062 error ("non-constant argument 0 to %qs", name);
7063 return const0_rtx;
7066 int dim = TREE_INT_CST_LOW (arg);
7067 switch (dim)
7069 case GOMP_DIM_GANG:
7070 case GOMP_DIM_WORKER:
7071 case GOMP_DIM_VECTOR:
7072 break;
7073 default:
7074 error ("illegal argument 0 to %qs", name);
7075 return const0_rtx;
7078 if (ignore)
7079 return target;
7081 if (target == NULL_RTX)
7082 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7084 if (!targetm.have_oacc_dim_size ())
7086 emit_move_insn (target, fallback_retval);
7087 return target;
7090 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7091 emit_insn (gen_fn (reg, GEN_INT (dim)));
7092 if (reg != target)
7093 emit_move_insn (target, reg);
7095 return target;
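/* Hypothetical usage sketch (not from this file): inside an OpenACC
   offloaded function one might write

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
     int vlen = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   The argument must be a constant GOMP_DIM_* value; on targets without
   oacc_dim_pos/oacc_dim_size patterns the calls reduce to the fallback
   values 0 and 1 used above.  */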
7098 /* Expand a string compare operation using a sequence of char comparisons
7099 to get rid of the calling overhead, with result going to TARGET if
7100 that's convenient.
7102 VAR_STR is the variable string source;
7103 CONST_STR is the constant string source;
7104 LENGTH is the number of chars to compare;
7105 CONST_STR_N indicates which source string is the constant string;
7106 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7108 The generated sequence is equivalent to (assume const_str_n is 2, i.e., arg2 is the constant string):
7110 target = (int) (unsigned char) var_str[0]
7111 - (int) (unsigned char) const_str[0];
7112 if (target != 0)
7113 goto ne_label;
7114 ...
7115 target = (int) (unsigned char) var_str[length - 2]
7116 - (int) (unsigned char) const_str[length - 2];
7117 if (target != 0)
7118 goto ne_label;
7119 target = (int) (unsigned char) var_str[length - 1]
7120 - (int) (unsigned char) const_str[length - 1];
7121 ne_label:
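/* A concrete instance (illustrative only): for strcmp (s, "hi") the bound
   becomes 3 (the two characters plus the terminating NUL), so the emitted
   sequence compares s[0] with 'h', s[1] with 'i' and s[2] with '\0',
   branching to ne_label as soon as a nonzero difference is produced.  */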
7124 static rtx
7125 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7126 unsigned HOST_WIDE_INT length,
7127 int const_str_n, machine_mode mode)
7129 HOST_WIDE_INT offset = 0;
7130 rtx var_rtx_array
7131 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7132 rtx var_rtx = NULL_RTX;
7133 rtx const_rtx = NULL_RTX;
7134 rtx result = target ? target : gen_reg_rtx (mode);
7135 rtx_code_label *ne_label = gen_label_rtx ();
7136 tree unit_type_node = unsigned_char_type_node;
7137 scalar_int_mode unit_mode
7138 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7140 start_sequence ();
7142 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7144 var_rtx
7145 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7146 const_rtx = c_readstr (const_str + offset, unit_mode);
7147 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7148 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7150 op0 = convert_modes (mode, unit_mode, op0, 1);
7151 op1 = convert_modes (mode, unit_mode, op1, 1);
7152 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7153 result, 1, OPTAB_WIDEN);
7155 /* Force the difference into the result register. We cannot reassign
7156 result here ("result = diff") or we may end up returning an
7157 uninitialized result when expand_simple_binop allocates a new
7158 pseudo-register for the return value. */
7159 if (diff != result)
7160 emit_move_insn (result, diff);
7162 if (i < length - 1)
7163 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7164 mode, true, ne_label);
7165 offset += GET_MODE_SIZE (unit_mode);
7168 emit_label (ne_label);
7169 rtx_insn *insns = get_insns ();
7170 end_sequence ();
7171 emit_insn (insns);
7173 return result;
7176 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7177 to TARGET if that's convenient.
7178 If the call is not inlined, return NULL_RTX. */
7180 static rtx
7181 inline_expand_builtin_bytecmp (tree exp, rtx target)
7183 tree fndecl = get_callee_fndecl (exp);
7184 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7185 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7187 /* Do NOT apply this inlining expansion when optimizing for size, when the
7188 optimization level is below 2, or if an unused *cmp call has not been DCEd. */
7189 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7190 return NULL_RTX;
7192 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7193 || fcode == BUILT_IN_STRNCMP
7194 || fcode == BUILT_IN_MEMCMP);
7196 /* On a target where the type of the call (int) has the same or narrower
7197 precision than unsigned char, give up the inlining expansion. */
7198 if (TYPE_PRECISION (unsigned_char_type_node)
7199 >= TYPE_PRECISION (TREE_TYPE (exp)))
7200 return NULL_RTX;
7202 tree arg1 = CALL_EXPR_ARG (exp, 0);
7203 tree arg2 = CALL_EXPR_ARG (exp, 1);
7204 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7206 unsigned HOST_WIDE_INT len1 = 0;
7207 unsigned HOST_WIDE_INT len2 = 0;
7208 unsigned HOST_WIDE_INT len3 = 0;
7210 /* Get the object representation of the initializers of ARG1 and ARG2
7211 as strings, provided they refer to constant objects, with their byte
7212 sizes in LEN1 and LEN2, respectively. */
7213 const char *bytes1 = getbyterep (arg1, &len1);
7214 const char *bytes2 = getbyterep (arg2, &len2);
7216 /* Fail if neither argument refers to an initialized constant. */
7217 if (!bytes1 && !bytes2)
7218 return NULL_RTX;
7220 if (is_ncmp)
7222 /* Fail if the memcmp/strncmp bound is not a constant. */
7223 if (!tree_fits_uhwi_p (len3_tree))
7224 return NULL_RTX;
7226 len3 = tree_to_uhwi (len3_tree);
7228 if (fcode == BUILT_IN_MEMCMP)
7230 /* Fail if the memcmp bound is greater than the size of either
7231 of the two constant objects. */
7232 if ((bytes1 && len1 < len3)
7233 || (bytes2 && len2 < len3))
7234 return NULL_RTX;
7238 if (fcode != BUILT_IN_MEMCMP)
7240 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7241 and LEN2 to the length of the nul-terminated string stored
7242 in each. */
7243 if (bytes1 != NULL)
7244 len1 = strnlen (bytes1, len1) + 1;
7245 if (bytes2 != NULL)
7246 len2 = strnlen (bytes2, len2) + 1;
7249 /* See inline_string_cmp. */
7250 int const_str_n;
7251 if (!len1)
7252 const_str_n = 2;
7253 else if (!len2)
7254 const_str_n = 1;
7255 else if (len2 > len1)
7256 const_str_n = 1;
7257 else
7258 const_str_n = 2;
7260 /* For strncmp only, compute the new bound as the smallest of
7261 the lengths of the two strings (plus 1) and the bound provided
7262 to the function. */
7263 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7264 if (is_ncmp && len3 < bound)
7265 bound = len3;
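/* Worked example (illustrative): for strncmp (s, "hello", 10) we get
   len2 = 6 (strlen ("hello") + 1), const_str_n = 2, and the bound is
   MIN (6, 10) = 6; the inline expansion is then emitted only if 6 does
   not exceed the --param builtin-string-cmp-inline-length limit checked
   below.  */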
7267 /* If the bound of the comparison is larger than the threshold,
7268 do nothing. */
7269 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7270 return NULL_RTX;
7272 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7274 /* Now, start the inline expansion of the call. */
7275 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7276 (const_str_n == 1) ? bytes1 : bytes2, bound,
7277 const_str_n, mode);
7280 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7281 represents the size of the first argument to that call, or VOIDmode
7282 if the argument is a pointer. IGNORE will be true if the result
7283 isn't used. */
7284 static rtx
7285 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7286 bool ignore)
7288 rtx val, failsafe;
7289 unsigned nargs = call_expr_nargs (exp);
7291 tree arg0 = CALL_EXPR_ARG (exp, 0);
7293 if (mode == VOIDmode)
7295 mode = TYPE_MODE (TREE_TYPE (arg0));
7296 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7299 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7301 /* An optional second argument can be used as a failsafe value on
7302 some machines. If it isn't present, then the failsafe value is
7303 assumed to be 0. */
7304 if (nargs > 1)
7306 tree arg1 = CALL_EXPR_ARG (exp, 1);
7307 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7309 else
7310 failsafe = const0_rtx;
7312 /* If the result isn't used, the behavior is undefined. It would be
7313 nice to emit a warning here, but path splitting means this might
7314 happen with legitimate code. So simply drop the builtin
7315 expansion in that case; we've handled any side-effects above. */
7316 if (ignore)
7317 return const0_rtx;
7319 /* If we don't have a suitable target, create one to hold the result. */
7320 if (target == NULL || GET_MODE (target) != mode)
7321 target = gen_reg_rtx (mode);
7323 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7324 val = convert_modes (mode, VOIDmode, val, false);
7326 return targetm.speculation_safe_value (mode, target, val, failsafe);
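/* Illustrative use (not from this file): after a bounds check that might be
   bypassed under speculation,

     if (i < len)
       val = array[__builtin_speculation_safe_value (i, 0)];

   the target hook above arranges for I to be replaced by the failsafe value 0
   on the mispredicted path, on targets that implement a speculation barrier
   (other targets may simply pass the value through).  */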
7329 /* Expand an expression EXP that calls a built-in function,
7330 with result going to TARGET if that's convenient
7331 (and in mode MODE if that's convenient).
7332 SUBTARGET may be used as the target for computing one of EXP's operands.
7333 IGNORE is nonzero if the value is to be ignored. */
7335 rtx
7336 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7337 int ignore)
7339 tree fndecl = get_callee_fndecl (exp);
7340 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7341 int flags;
7343 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7344 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7346 /* When ASan is enabled, we don't want to expand some memory/string
7347 builtins and rely on libsanitizer's hooks. This allows us to avoid
7348 redundant checks and be sure that a possible overflow will be detected
7349 by ASan. */
7351 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7352 if (param_asan_kernel_mem_intrinsic_prefix
7353 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7354 | SANITIZE_KERNEL_HWADDRESS))
7355 switch (fcode)
7357 rtx save_decl_rtl, ret;
7358 case BUILT_IN_MEMCPY:
7359 case BUILT_IN_MEMMOVE:
7360 case BUILT_IN_MEMSET:
7361 save_decl_rtl = DECL_RTL (fndecl);
7362 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7363 ret = expand_call (exp, target, ignore);
7364 DECL_RTL (fndecl) = save_decl_rtl;
7365 return ret;
7366 default:
7367 break;
7369 if (sanitize_flags_p (SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7370 return expand_call (exp, target, ignore);
7372 /* When not optimizing, generate calls to library functions for a certain
7373 set of builtins. */
7374 if (!optimize
7375 && !called_as_built_in (fndecl)
7376 && fcode != BUILT_IN_FORK
7377 && fcode != BUILT_IN_EXECL
7378 && fcode != BUILT_IN_EXECV
7379 && fcode != BUILT_IN_EXECLP
7380 && fcode != BUILT_IN_EXECLE
7381 && fcode != BUILT_IN_EXECVP
7382 && fcode != BUILT_IN_EXECVE
7383 && fcode != BUILT_IN_CLEAR_CACHE
7384 && !ALLOCA_FUNCTION_CODE_P (fcode)
7385 && fcode != BUILT_IN_FREE)
7386 return expand_call (exp, target, ignore);
7388 /* The built-in function expanders test for target == const0_rtx
7389 to determine whether the function's result will be ignored. */
7390 if (ignore)
7391 target = const0_rtx;
7393 /* If the result of a pure or const built-in function is ignored, and
7394 none of its arguments are volatile, we can avoid expanding the
7395 built-in call and just evaluate the arguments for side-effects. */
7396 if (target == const0_rtx
7397 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7398 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7400 bool volatilep = false;
7401 tree arg;
7402 call_expr_arg_iterator iter;
7404 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7405 if (TREE_THIS_VOLATILE (arg))
7407 volatilep = true;
7408 break;
7411 if (! volatilep)
7413 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7414 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7415 return const0_rtx;
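/* Example of the shortcut above (illustrative): for a statement such as
   "(void) __builtin_strlen (p);" the strlen call is pure and its result is
   ignored, so only the argument P is evaluated for its side effects and
   const0_rtx is returned without expanding the call.  */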
7419 switch (fcode)
7421 CASE_FLT_FN (BUILT_IN_FABS):
7422 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7423 case BUILT_IN_FABSD32:
7424 case BUILT_IN_FABSD64:
7425 case BUILT_IN_FABSD128:
7426 target = expand_builtin_fabs (exp, target, subtarget);
7427 if (target)
7428 return target;
7429 break;
7431 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7432 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7433 target = expand_builtin_copysign (exp, target, subtarget);
7434 if (target)
7435 return target;
7436 break;
7438 /* Just do a normal library call if we were unable to fold
7439 the values. */
7440 CASE_FLT_FN (BUILT_IN_CABS):
7441 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7442 break;
7444 CASE_FLT_FN (BUILT_IN_FMA):
7445 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7446 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7447 if (target)
7448 return target;
7449 break;
7451 CASE_FLT_FN (BUILT_IN_ILOGB):
7452 if (! flag_unsafe_math_optimizations)
7453 break;
7454 gcc_fallthrough ();
7455 CASE_FLT_FN (BUILT_IN_ISINF):
7456 CASE_FLT_FN (BUILT_IN_FINITE):
7457 case BUILT_IN_ISFINITE:
7458 case BUILT_IN_ISNORMAL:
7459 target = expand_builtin_interclass_mathfn (exp, target);
7460 if (target)
7461 return target;
7462 break;
7464 case BUILT_IN_ISSIGNALING:
7465 target = expand_builtin_issignaling (exp, target);
7466 if (target)
7467 return target;
7468 break;
7470 CASE_FLT_FN (BUILT_IN_ICEIL):
7471 CASE_FLT_FN (BUILT_IN_LCEIL):
7472 CASE_FLT_FN (BUILT_IN_LLCEIL):
7473 CASE_FLT_FN (BUILT_IN_LFLOOR):
7474 CASE_FLT_FN (BUILT_IN_IFLOOR):
7475 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7476 target = expand_builtin_int_roundingfn (exp, target);
7477 if (target)
7478 return target;
7479 break;
7481 CASE_FLT_FN (BUILT_IN_IRINT):
7482 CASE_FLT_FN (BUILT_IN_LRINT):
7483 CASE_FLT_FN (BUILT_IN_LLRINT):
7484 CASE_FLT_FN (BUILT_IN_IROUND):
7485 CASE_FLT_FN (BUILT_IN_LROUND):
7486 CASE_FLT_FN (BUILT_IN_LLROUND):
7487 target = expand_builtin_int_roundingfn_2 (exp, target);
7488 if (target)
7489 return target;
7490 break;
7492 CASE_FLT_FN (BUILT_IN_POWI):
7493 target = expand_builtin_powi (exp, target);
7494 if (target)
7495 return target;
7496 break;
7498 CASE_FLT_FN (BUILT_IN_CEXPI):
7499 target = expand_builtin_cexpi (exp, target);
7500 gcc_assert (target);
7501 return target;
7503 CASE_FLT_FN (BUILT_IN_SIN):
7504 CASE_FLT_FN (BUILT_IN_COS):
7505 if (! flag_unsafe_math_optimizations)
7506 break;
7507 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7508 if (target)
7509 return target;
7510 break;
7512 CASE_FLT_FN (BUILT_IN_SINCOS):
7513 if (! flag_unsafe_math_optimizations)
7514 break;
7515 target = expand_builtin_sincos (exp);
7516 if (target)
7517 return target;
7518 break;
7520 case BUILT_IN_FEGETROUND:
7521 target = expand_builtin_fegetround (exp, target, target_mode);
7522 if (target)
7523 return target;
7524 break;
7526 case BUILT_IN_FECLEAREXCEPT:
7527 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7528 feclearexcept_optab);
7529 if (target)
7530 return target;
7531 break;
7533 case BUILT_IN_FERAISEEXCEPT:
7534 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7535 feraiseexcept_optab);
7536 if (target)
7537 return target;
7538 break;
7540 case BUILT_IN_APPLY_ARGS:
7541 return expand_builtin_apply_args ();
7543 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7544 FUNCTION with a copy of the parameters described by
7545 ARGUMENTS, and ARGSIZE. It returns a block of memory
7546 allocated on the stack into which is stored all the registers
7547 that might possibly be used for returning the result of a
7548 function. ARGUMENTS is the value returned by
7549 __builtin_apply_args. ARGSIZE is the number of bytes of
7550 arguments that must be copied. ??? How should this value be
7551 computed? We'll also need a safe worst case value for varargs
7552 functions. */
7553 case BUILT_IN_APPLY:
7554 if (!validate_arglist (exp, POINTER_TYPE,
7555 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7556 && !validate_arglist (exp, REFERENCE_TYPE,
7557 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7558 return const0_rtx;
7559 else
7561 rtx ops[3];
7563 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7564 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7565 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7567 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7570 /* __builtin_return (RESULT) causes the function to return the
7571 value described by RESULT. RESULT is address of the block of
7572 memory returned by __builtin_apply. */
7573 case BUILT_IN_RETURN:
7574 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7575 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7576 return const0_rtx;
7578 case BUILT_IN_SAVEREGS:
7579 return expand_builtin_saveregs ();
7581 case BUILT_IN_VA_ARG_PACK:
7582 /* All valid uses of __builtin_va_arg_pack () are removed during
7583 inlining. */
7584 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7585 return const0_rtx;
7587 case BUILT_IN_VA_ARG_PACK_LEN:
7588 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7589 inlining. */
7590 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7591 return const0_rtx;
7593 /* Return the address of the first anonymous stack arg. */
7594 case BUILT_IN_NEXT_ARG:
7595 if (fold_builtin_next_arg (exp, false))
7596 return const0_rtx;
7597 return expand_builtin_next_arg ();
7599 case BUILT_IN_CLEAR_CACHE:
7600 expand_builtin___clear_cache (exp);
7601 return const0_rtx;
7603 case BUILT_IN_CLASSIFY_TYPE:
7604 return expand_builtin_classify_type (exp);
7606 case BUILT_IN_CONSTANT_P:
7607 return const0_rtx;
7609 case BUILT_IN_FRAME_ADDRESS:
7610 case BUILT_IN_RETURN_ADDRESS:
7611 return expand_builtin_frame_address (fndecl, exp);
7613 /* Returns the address of the area where the structure is returned.
7614 0 otherwise. */
7615 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7616 if (call_expr_nargs (exp) != 0
7617 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7618 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7619 return const0_rtx;
7620 else
7621 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7623 CASE_BUILT_IN_ALLOCA:
7624 target = expand_builtin_alloca (exp);
7625 if (target)
7626 return target;
7627 break;
7629 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7630 return expand_asan_emit_allocas_unpoison (exp);
7632 case BUILT_IN_STACK_SAVE:
7633 return expand_stack_save ();
7635 case BUILT_IN_STACK_RESTORE:
7636 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7637 return const0_rtx;
7639 case BUILT_IN_BSWAP16:
7640 case BUILT_IN_BSWAP32:
7641 case BUILT_IN_BSWAP64:
7642 case BUILT_IN_BSWAP128:
7643 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7644 if (target)
7645 return target;
7646 break;
7648 CASE_INT_FN (BUILT_IN_FFS):
7649 target = expand_builtin_unop (target_mode, exp, target,
7650 subtarget, ffs_optab);
7651 if (target)
7652 return target;
7653 break;
7655 CASE_INT_FN (BUILT_IN_CLZ):
7656 target = expand_builtin_unop (target_mode, exp, target,
7657 subtarget, clz_optab);
7658 if (target)
7659 return target;
7660 break;
7662 CASE_INT_FN (BUILT_IN_CTZ):
7663 target = expand_builtin_unop (target_mode, exp, target,
7664 subtarget, ctz_optab);
7665 if (target)
7666 return target;
7667 break;
7669 CASE_INT_FN (BUILT_IN_CLRSB):
7670 target = expand_builtin_unop (target_mode, exp, target,
7671 subtarget, clrsb_optab);
7672 if (target)
7673 return target;
7674 break;
7676 CASE_INT_FN (BUILT_IN_POPCOUNT):
7677 target = expand_builtin_unop (target_mode, exp, target,
7678 subtarget, popcount_optab);
7679 if (target)
7680 return target;
7681 break;
7683 CASE_INT_FN (BUILT_IN_PARITY):
7684 target = expand_builtin_unop (target_mode, exp, target,
7685 subtarget, parity_optab);
7686 if (target)
7687 return target;
7688 break;
7690 case BUILT_IN_STRLEN:
7691 target = expand_builtin_strlen (exp, target, target_mode);
7692 if (target)
7693 return target;
7694 break;
7696 case BUILT_IN_STRNLEN:
7697 target = expand_builtin_strnlen (exp, target, target_mode);
7698 if (target)
7699 return target;
7700 break;
7702 case BUILT_IN_STRCPY:
7703 target = expand_builtin_strcpy (exp, target);
7704 if (target)
7705 return target;
7706 break;
7708 case BUILT_IN_STRNCPY:
7709 target = expand_builtin_strncpy (exp, target);
7710 if (target)
7711 return target;
7712 break;
7714 case BUILT_IN_STPCPY:
7715 target = expand_builtin_stpcpy (exp, target, mode);
7716 if (target)
7717 return target;
7718 break;
7720 case BUILT_IN_MEMCPY:
7721 target = expand_builtin_memcpy (exp, target);
7722 if (target)
7723 return target;
7724 break;
7726 case BUILT_IN_MEMMOVE:
7727 target = expand_builtin_memmove (exp, target);
7728 if (target)
7729 return target;
7730 break;
7732 case BUILT_IN_MEMPCPY:
7733 target = expand_builtin_mempcpy (exp, target);
7734 if (target)
7735 return target;
7736 break;
7738 case BUILT_IN_MEMSET:
7739 target = expand_builtin_memset (exp, target, mode);
7740 if (target)
7741 return target;
7742 break;
7744 case BUILT_IN_BZERO:
7745 target = expand_builtin_bzero (exp);
7746 if (target)
7747 return target;
7748 break;
7750 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7751 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7752 when changing it to a strcmp call. */
7753 case BUILT_IN_STRCMP_EQ:
7754 target = expand_builtin_memcmp (exp, target, true);
7755 if (target)
7756 return target;
7758 /* Change this call back to a BUILT_IN_STRCMP. */
7759 TREE_OPERAND (exp, 1)
7760 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7762 /* Delete the last parameter. */
7763 unsigned int i;
7764 vec<tree, va_gc> *arg_vec;
7765 vec_alloc (arg_vec, 2);
7766 for (i = 0; i < 2; i++)
7767 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7768 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7769 /* FALLTHROUGH */
7771 case BUILT_IN_STRCMP:
7772 target = expand_builtin_strcmp (exp, target);
7773 if (target)
7774 return target;
7775 break;
7777 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7778 back to a BUILT_IN_STRNCMP. */
7779 case BUILT_IN_STRNCMP_EQ:
7780 target = expand_builtin_memcmp (exp, target, true);
7781 if (target)
7782 return target;
7784 /* Change it back to a BUILT_IN_STRNCMP. */
7785 TREE_OPERAND (exp, 1)
7786 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7787 /* FALLTHROUGH */
7789 case BUILT_IN_STRNCMP:
7790 target = expand_builtin_strncmp (exp, target, mode);
7791 if (target)
7792 return target;
7793 break;
7795 case BUILT_IN_BCMP:
7796 case BUILT_IN_MEMCMP:
7797 case BUILT_IN_MEMCMP_EQ:
7798 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7799 if (target)
7800 return target;
7801 if (fcode == BUILT_IN_MEMCMP_EQ)
7803 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7804 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7806 break;
7808 case BUILT_IN_SETJMP:
7809 /* This should have been lowered to the builtins below. */
7810 gcc_unreachable ();
7812 case BUILT_IN_SETJMP_SETUP:
7813 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7814 and the receiver label. */
7815 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7817 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7818 VOIDmode, EXPAND_NORMAL);
7819 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7820 rtx_insn *label_r = label_rtx (label);
7822 expand_builtin_setjmp_setup (buf_addr, label_r);
7823 return const0_rtx;
7825 break;
7827 case BUILT_IN_SETJMP_RECEIVER:
7828 /* __builtin_setjmp_receiver is passed the receiver label. */
7829 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7831 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7832 rtx_insn *label_r = label_rtx (label);
7834 expand_builtin_setjmp_receiver (label_r);
7835 nonlocal_goto_handler_labels
7836 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7837 nonlocal_goto_handler_labels);
7838 /* ??? Do not let expand_label treat us as such since we would
7839 not want to be both on the list of non-local labels and on
7840 the list of forced labels. */
7841 FORCED_LABEL (label) = 0;
7842 return const0_rtx;
7844 break;
7846 /* __builtin_longjmp is passed a pointer to an array of five words.
7847 It's similar to the C library longjmp function but works with
7848 __builtin_setjmp above. */
7849 case BUILT_IN_LONGJMP:
7850 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7852 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7853 VOIDmode, EXPAND_NORMAL);
7854 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7856 if (value != const1_rtx)
7858 error ("%<__builtin_longjmp%> second argument must be 1");
7859 return const0_rtx;
7862 expand_builtin_longjmp (buf_addr, value);
7863 return const0_rtx;
7865 break;
7867 case BUILT_IN_NONLOCAL_GOTO:
7868 target = expand_builtin_nonlocal_goto (exp);
7869 if (target)
7870 return target;
7871 break;
7873 /* This updates the setjmp buffer that is its argument with the value
7874 of the current stack pointer. */
7875 case BUILT_IN_UPDATE_SETJMP_BUF:
7876 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7878 rtx buf_addr
7879 = expand_normal (CALL_EXPR_ARG (exp, 0));
7881 expand_builtin_update_setjmp_buf (buf_addr);
7882 return const0_rtx;
7884 break;
7886 case BUILT_IN_TRAP:
7887 case BUILT_IN_UNREACHABLE_TRAP:
7888 expand_builtin_trap ();
7889 return const0_rtx;
7891 case BUILT_IN_UNREACHABLE:
7892 expand_builtin_unreachable ();
7893 return const0_rtx;
7895 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7896 case BUILT_IN_SIGNBITD32:
7897 case BUILT_IN_SIGNBITD64:
7898 case BUILT_IN_SIGNBITD128:
7899 target = expand_builtin_signbit (exp, target);
7900 if (target)
7901 return target;
7902 break;
7904 /* Various hooks for the DWARF 2 __throw routine. */
7905 case BUILT_IN_UNWIND_INIT:
7906 expand_builtin_unwind_init ();
7907 return const0_rtx;
7908 case BUILT_IN_DWARF_CFA:
7909 return virtual_cfa_rtx;
7910 #ifdef DWARF2_UNWIND_INFO
7911 case BUILT_IN_DWARF_SP_COLUMN:
7912 return expand_builtin_dwarf_sp_column ();
7913 case BUILT_IN_INIT_DWARF_REG_SIZES:
7914 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7915 return const0_rtx;
7916 #endif
7917 case BUILT_IN_FROB_RETURN_ADDR:
7918 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7919 case BUILT_IN_EXTRACT_RETURN_ADDR:
7920 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7921 case BUILT_IN_EH_RETURN:
7922 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7923 CALL_EXPR_ARG (exp, 1));
7924 return const0_rtx;
7925 case BUILT_IN_EH_RETURN_DATA_REGNO:
7926 return expand_builtin_eh_return_data_regno (exp);
7927 case BUILT_IN_EXTEND_POINTER:
7928 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7929 case BUILT_IN_EH_POINTER:
7930 return expand_builtin_eh_pointer (exp);
7931 case BUILT_IN_EH_FILTER:
7932 return expand_builtin_eh_filter (exp);
7933 case BUILT_IN_EH_COPY_VALUES:
7934 return expand_builtin_eh_copy_values (exp);
7936 case BUILT_IN_VA_START:
7937 return expand_builtin_va_start (exp);
7938 case BUILT_IN_VA_END:
7939 return expand_builtin_va_end (exp);
7940 case BUILT_IN_VA_COPY:
7941 return expand_builtin_va_copy (exp);
7942 case BUILT_IN_EXPECT:
7943 return expand_builtin_expect (exp, target);
7944 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7945 return expand_builtin_expect_with_probability (exp, target);
7946 case BUILT_IN_ASSUME_ALIGNED:
7947 return expand_builtin_assume_aligned (exp, target);
7948 case BUILT_IN_PREFETCH:
7949 expand_builtin_prefetch (exp);
7950 return const0_rtx;
7952 case BUILT_IN_INIT_TRAMPOLINE:
7953 return expand_builtin_init_trampoline (exp, true);
7954 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7955 return expand_builtin_init_trampoline (exp, false);
7956 case BUILT_IN_ADJUST_TRAMPOLINE:
7957 return expand_builtin_adjust_trampoline (exp);
7959 case BUILT_IN_INIT_DESCRIPTOR:
7960 return expand_builtin_init_descriptor (exp);
7961 case BUILT_IN_ADJUST_DESCRIPTOR:
7962 return expand_builtin_adjust_descriptor (exp);
7964 case BUILT_IN_FORK:
7965 case BUILT_IN_EXECL:
7966 case BUILT_IN_EXECV:
7967 case BUILT_IN_EXECLP:
7968 case BUILT_IN_EXECLE:
7969 case BUILT_IN_EXECVP:
7970 case BUILT_IN_EXECVE:
7971 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7972 if (target)
7973 return target;
7974 break;
7976 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7977 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7978 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7979 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7980 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7981 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7982 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7983 if (target)
7984 return target;
7985 break;
7987 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7988 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7989 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7990 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7991 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7992 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7993 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7994 if (target)
7995 return target;
7996 break;
7998 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7999 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8000 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8001 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8002 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8003 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8004 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8010 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8011 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8012 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8013 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8014 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8015 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8016 if (target)
8017 return target;
8018 break;
8020 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8021 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8022 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8023 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8024 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8025 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8026 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8027 if (target)
8028 return target;
8029 break;
8031 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8032 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8033 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8034 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8035 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8036 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8037 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8038 if (target)
8039 return target;
8040 break;
8042 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8043 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8044 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8045 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8046 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8047 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8048 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8049 if (target)
8050 return target;
8051 break;
8053 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8054 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8055 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8056 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8057 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8058 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8059 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8060 if (target)
8061 return target;
8062 break;
8064 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8065 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8066 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8067 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8068 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8069 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8070 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8071 if (target)
8072 return target;
8073 break;
8075 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8076 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8077 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8078 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8079 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8080 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8081 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8082 if (target)
8083 return target;
8084 break;
8086 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8087 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8088 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8089 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8090 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8091 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8092 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8093 if (target)
8094 return target;
8095 break;
8097 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8098 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8099 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8100 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8101 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8102 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8103 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8104 if (target)
8105 return target;
8106 break;
8108 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8109 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8110 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8111 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8112 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8113 if (mode == VOIDmode)
8114 mode = TYPE_MODE (boolean_type_node);
8115 if (!target || !register_operand (target, mode))
8116 target = gen_reg_rtx (mode);
8118 mode = get_builtin_sync_mode
8119 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8120 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8121 if (target)
8122 return target;
8123 break;
8125 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8126 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8127 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8128 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8129 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8130 mode = get_builtin_sync_mode
8131 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8132 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8133 if (target)
8134 return target;
8135 break;
8137 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8138 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8139 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8140 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8141 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8142 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8143 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8144 if (target)
8145 return target;
8146 break;
8148 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8149 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8150 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8151 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8152 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8153 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8154 expand_builtin_sync_lock_release (mode, exp);
8155 return const0_rtx;
8157 case BUILT_IN_SYNC_SYNCHRONIZE:
8158 expand_builtin_sync_synchronize ();
8159 return const0_rtx;
8161 case BUILT_IN_ATOMIC_EXCHANGE_1:
8162 case BUILT_IN_ATOMIC_EXCHANGE_2:
8163 case BUILT_IN_ATOMIC_EXCHANGE_4:
8164 case BUILT_IN_ATOMIC_EXCHANGE_8:
8165 case BUILT_IN_ATOMIC_EXCHANGE_16:
8166 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8167 target = expand_builtin_atomic_exchange (mode, exp, target);
8168 if (target)
8169 return target;
8170 break;
8172 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8173 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8174 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8175 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8176 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8178 unsigned int nargs, z;
8179 vec<tree, va_gc> *vec;
8181 mode =
8182 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8183 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8184 if (target)
8185 return target;
8187 /* If this is turned into an external library call, the weak parameter
8188 must be dropped to match the expected parameter list. */
8189 nargs = call_expr_nargs (exp);
8190 vec_alloc (vec, nargs - 1);
8191 for (z = 0; z < 3; z++)
8192 vec->quick_push (CALL_EXPR_ARG (exp, z));
8193 /* Skip the boolean weak parameter. */
8194 for (z = 4; z < 6; z++)
8195 vec->quick_push (CALL_EXPR_ARG (exp, z));
8196 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8197 break;
8200 case BUILT_IN_ATOMIC_LOAD_1:
8201 case BUILT_IN_ATOMIC_LOAD_2:
8202 case BUILT_IN_ATOMIC_LOAD_4:
8203 case BUILT_IN_ATOMIC_LOAD_8:
8204 case BUILT_IN_ATOMIC_LOAD_16:
8205 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8206 target = expand_builtin_atomic_load (mode, exp, target);
8207 if (target)
8208 return target;
8209 break;
8211 case BUILT_IN_ATOMIC_STORE_1:
8212 case BUILT_IN_ATOMIC_STORE_2:
8213 case BUILT_IN_ATOMIC_STORE_4:
8214 case BUILT_IN_ATOMIC_STORE_8:
8215 case BUILT_IN_ATOMIC_STORE_16:
8216 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8217 target = expand_builtin_atomic_store (mode, exp);
8218 if (target)
8219 return const0_rtx;
8220 break;
8222 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8223 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8224 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8225 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8226 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8228 enum built_in_function lib;
8229 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8230 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8231 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8232 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8233 ignore, lib);
8234 if (target)
8235 return target;
8236 break;
8238 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8239 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8240 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8241 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8242 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8244 enum built_in_function lib;
8245 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8246 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8247 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8248 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8249 ignore, lib);
8250 if (target)
8251 return target;
8252 break;
8254 case BUILT_IN_ATOMIC_AND_FETCH_1:
8255 case BUILT_IN_ATOMIC_AND_FETCH_2:
8256 case BUILT_IN_ATOMIC_AND_FETCH_4:
8257 case BUILT_IN_ATOMIC_AND_FETCH_8:
8258 case BUILT_IN_ATOMIC_AND_FETCH_16:
8260 enum built_in_function lib;
8261 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8262 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8263 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8264 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8265 ignore, lib);
8266 if (target)
8267 return target;
8268 break;
8270 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8271 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8272 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8273 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8274 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8276 enum built_in_function lib;
8277 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8278 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8279 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8280 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8281 ignore, lib);
8282 if (target)
8283 return target;
8284 break;
8286 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8287 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8288 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8289 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8290 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8292 enum built_in_function lib;
8293 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8294 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8295 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8296 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8297 ignore, lib);
8298 if (target)
8299 return target;
8300 break;
8302 case BUILT_IN_ATOMIC_OR_FETCH_1:
8303 case BUILT_IN_ATOMIC_OR_FETCH_2:
8304 case BUILT_IN_ATOMIC_OR_FETCH_4:
8305 case BUILT_IN_ATOMIC_OR_FETCH_8:
8306 case BUILT_IN_ATOMIC_OR_FETCH_16:
8308 enum built_in_function lib;
8309 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8310 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8311 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8312 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8313 ignore, lib);
8314 if (target)
8315 return target;
8316 break;
8318 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8319 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8320 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8321 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8322 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8323 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8324 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8325 ignore, BUILT_IN_NONE);
8326 if (target)
8327 return target;
8328 break;
8330 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8331 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8332 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8333 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8334 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8335 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8336 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8337 ignore, BUILT_IN_NONE);
8338 if (target)
8339 return target;
8340 break;
8342 case BUILT_IN_ATOMIC_FETCH_AND_1:
8343 case BUILT_IN_ATOMIC_FETCH_AND_2:
8344 case BUILT_IN_ATOMIC_FETCH_AND_4:
8345 case BUILT_IN_ATOMIC_FETCH_AND_8:
8346 case BUILT_IN_ATOMIC_FETCH_AND_16:
8347 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8348 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8349 ignore, BUILT_IN_NONE);
8350 if (target)
8351 return target;
8352 break;
8354 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8355 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8356 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8357 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8358 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8359 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8360 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8361 ignore, BUILT_IN_NONE);
8362 if (target)
8363 return target;
8364 break;
8366 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8367 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8368 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8369 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8370 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8371 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8372 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8373 ignore, BUILT_IN_NONE);
8374 if (target)
8375 return target;
8376 break;
8378 case BUILT_IN_ATOMIC_FETCH_OR_1:
8379 case BUILT_IN_ATOMIC_FETCH_OR_2:
8380 case BUILT_IN_ATOMIC_FETCH_OR_4:
8381 case BUILT_IN_ATOMIC_FETCH_OR_8:
8382 case BUILT_IN_ATOMIC_FETCH_OR_16:
8383 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8384 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8385 ignore, BUILT_IN_NONE);
8386 if (target)
8387 return target;
8388 break;
8390 case BUILT_IN_ATOMIC_TEST_AND_SET:
8391 return expand_builtin_atomic_test_and_set (exp, target);
8393 case BUILT_IN_ATOMIC_CLEAR:
8394 return expand_builtin_atomic_clear (exp);
8396 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8397 return expand_builtin_atomic_always_lock_free (exp);
8399 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8400 target = expand_builtin_atomic_is_lock_free (exp);
8401 if (target)
8402 return target;
8403 break;
8405 case BUILT_IN_ATOMIC_THREAD_FENCE:
8406 expand_builtin_atomic_thread_fence (exp);
8407 return const0_rtx;
8409 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8410 expand_builtin_atomic_signal_fence (exp);
8411 return const0_rtx;
8413 case BUILT_IN_OBJECT_SIZE:
8414 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8415 return expand_builtin_object_size (exp);
8417 case BUILT_IN_MEMCPY_CHK:
8418 case BUILT_IN_MEMPCPY_CHK:
8419 case BUILT_IN_MEMMOVE_CHK:
8420 case BUILT_IN_MEMSET_CHK:
8421 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8422 if (target)
8423 return target;
8424 break;
8426 case BUILT_IN_STRCPY_CHK:
8427 case BUILT_IN_STPCPY_CHK:
8428 case BUILT_IN_STRNCPY_CHK:
8429 case BUILT_IN_STPNCPY_CHK:
8430 case BUILT_IN_STRCAT_CHK:
8431 case BUILT_IN_STRNCAT_CHK:
8432 case BUILT_IN_SNPRINTF_CHK:
8433 case BUILT_IN_VSNPRINTF_CHK:
8434 maybe_emit_chk_warning (exp, fcode);
8435 break;
8437 case BUILT_IN_SPRINTF_CHK:
8438 case BUILT_IN_VSPRINTF_CHK:
8439 maybe_emit_sprintf_chk_warning (exp, fcode);
8440 break;
8442 case BUILT_IN_THREAD_POINTER:
8443 return expand_builtin_thread_pointer (exp, target);
8445 case BUILT_IN_SET_THREAD_POINTER:
8446 expand_builtin_set_thread_pointer (exp);
8447 return const0_rtx;
8449 case BUILT_IN_ACC_ON_DEVICE:
8450 /* Do library call, if we failed to expand the builtin when
8451 folding. */
8452 break;
8454 case BUILT_IN_GOACC_PARLEVEL_ID:
8455 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8456 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8458 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8459 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8461 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8462 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8463 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8464 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8465 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8467 return expand_speculation_safe_value (mode, exp, target, ignore);
8469 default: /* just do library call, if unknown builtin */
8470 break;
8473 /* The switch statement above can drop through to cause the function
8474 to be called normally. */
8475 return expand_call (exp, target, ignore);
8478 /* Determine whether a tree node represents a call to a built-in
8479 function. If the tree T is a call to a built-in function with
8480 the right number of arguments of the appropriate types, return
8481 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8482 Otherwise the return value is END_BUILTINS. */
8484 enum built_in_function
8485 builtin_mathfn_code (const_tree t)
8487 const_tree fndecl, arg, parmlist;
8488 const_tree argtype, parmtype;
8489 const_call_expr_arg_iterator iter;
8491 if (TREE_CODE (t) != CALL_EXPR)
8492 return END_BUILTINS;
8494 fndecl = get_callee_fndecl (t);
8495 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8496 return END_BUILTINS;
8498 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8499 init_const_call_expr_arg_iterator (t, &iter);
8500 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8502 /* If a function doesn't take a variable number of arguments,
8503 the last element in the list will have type `void'. */
8504 parmtype = TREE_VALUE (parmlist);
8505 if (VOID_TYPE_P (parmtype))
8507 if (more_const_call_expr_args_p (&iter))
8508 return END_BUILTINS;
8509 return DECL_FUNCTION_CODE (fndecl);
8512 if (! more_const_call_expr_args_p (&iter))
8513 return END_BUILTINS;
8515 arg = next_const_call_expr_arg (&iter);
8516 argtype = TREE_TYPE (arg);
8518 if (SCALAR_FLOAT_TYPE_P (parmtype))
8520 if (! SCALAR_FLOAT_TYPE_P (argtype))
8521 return END_BUILTINS;
8523 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8525 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8526 return END_BUILTINS;
8528 else if (POINTER_TYPE_P (parmtype))
8530 if (! POINTER_TYPE_P (argtype))
8531 return END_BUILTINS;
8533 else if (INTEGRAL_TYPE_P (parmtype))
8535 if (! INTEGRAL_TYPE_P (argtype))
8536 return END_BUILTINS;
8538 else
8539 return END_BUILTINS;
8542 /* Variable-length argument list. */
8543 return DECL_FUNCTION_CODE (fndecl);
8546 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8547 evaluate to a constant. */
8549 static tree
8550 fold_builtin_constant_p (tree arg)
8552 /* We return 1 for a numeric type that's known to be a constant
8553 value at compile-time or for an aggregate type that's a
8554 literal constant. */
8555 STRIP_NOPS (arg);
8557 /* If we know this is a constant, return the constant one. */
8558 if (CONSTANT_CLASS_P (arg)
8559 || (TREE_CODE (arg) == CONSTRUCTOR
8560 && TREE_CONSTANT (arg)))
8561 return integer_one_node;
8562 if (TREE_CODE (arg) == ADDR_EXPR)
8564 tree op = TREE_OPERAND (arg, 0);
8565 if (TREE_CODE (op) == STRING_CST
8566 || (TREE_CODE (op) == ARRAY_REF
8567 && integer_zerop (TREE_OPERAND (op, 1))
8568 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8569 return integer_one_node;
8572 /* If this expression has side effects, show we don't know it to be a
8573 constant. Likewise if it's a pointer or aggregate type since in
8574 those cases we only want literals, since those are only optimized
8575 when generating RTL, not later.
8576 And finally, if we are compiling an initializer, not code, we
8577 need to return a definite result now; there's not going to be any
8578 more optimization done. */
8579 if (TREE_SIDE_EFFECTS (arg)
8580 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8581 || POINTER_TYPE_P (TREE_TYPE (arg))
8582 || cfun == 0
8583 || folding_initializer
8584 || force_folding_builtin_constant_p)
8585 return integer_zero_node;
8587 return NULL_TREE;
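/* Some illustrative outcomes of the folder above (not exhaustive):
   __builtin_constant_p (42) and __builtin_constant_p ("abc") fold to 1;
   an argument with side effects, or a non-literal pointer or aggregate
   argument, folds to 0 (likewise when folding an initializer, where a
   definite answer is required); anything else is left as NULL_TREE so
   that later folding can still resolve it.  */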
8590 /* Create builtin_expect or builtin_expect_with_probability
8591 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8592 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8593 argument; builtin_expect_with_probability instead uses its third argument as
8594 a PROBABILITY value. */
8596 static tree
8597 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8598 tree predictor, tree probability)
8600 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8602 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8603 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8604 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8605 ret_type = TREE_TYPE (TREE_TYPE (fn));
8606 pred_type = TREE_VALUE (arg_types);
8607 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8609 pred = fold_convert_loc (loc, pred_type, pred);
8610 expected = fold_convert_loc (loc, expected_type, expected);
8612 if (probability)
8613 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8614 else
8615 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8616 predictor);
8618 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8619 build_int_cst (ret_type, 0));
8622 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8623 NULL_TREE if no simplification is possible. */
8625 tree
8626 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8627 tree arg3)
8629 tree inner, fndecl, inner_arg0;
8630 enum tree_code code;
8632 /* Distribute the expected value over short-circuiting operators.
8633 See through the cast from truthvalue_type_node to long. */
8634 inner_arg0 = arg0;
8635 while (CONVERT_EXPR_P (inner_arg0)
8636 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8637 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8638 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8640 /* If this is a builtin_expect within a builtin_expect, keep the
8641 inner one. See through a comparison against a constant; it
8642 might have been added to create a truthvalue. */
8643 inner = inner_arg0;
8645 if (COMPARISON_CLASS_P (inner)
8646 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8647 inner = TREE_OPERAND (inner, 0);
8649 if (TREE_CODE (inner) == CALL_EXPR
8650 && (fndecl = get_callee_fndecl (inner))
8651 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
8652 BUILT_IN_EXPECT_WITH_PROBABILITY))
8653 return arg0;
8655 inner = inner_arg0;
8656 code = TREE_CODE (inner);
8657 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8659 tree op0 = TREE_OPERAND (inner, 0);
8660 tree op1 = TREE_OPERAND (inner, 1);
8661 arg1 = save_expr (arg1);
8663 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8664 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8665 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8667 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
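/* For instance (illustrative), __builtin_expect (a && b, 1) is rewritten
   here as roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the expectation reaches both of the short-circuited conditions.  */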
8670 /* If the argument isn't invariant then there's nothing else we can do. */
8671 if (!TREE_CONSTANT (inner_arg0))
8672 return NULL_TREE;
8674 /* If we expect that a comparison against the argument will fold to
8675 a constant, return the constant. In practice, this means a true
8676 constant or the address of a non-weak symbol. */
8677 inner = inner_arg0;
8678 STRIP_NOPS (inner);
8679 if (TREE_CODE (inner) == ADDR_EXPR)
8683 inner = TREE_OPERAND (inner, 0);
8685 while (TREE_CODE (inner) == COMPONENT_REF
8686 || TREE_CODE (inner) == ARRAY_REF);
8687 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8688 return NULL_TREE;
8691 /* Otherwise, ARG0 already has the proper type for the return value. */
8692 return arg0;
8695 /* Fold a call to __builtin_classify_type with argument ARG. */
8697 static tree
8698 fold_builtin_classify_type (tree arg)
8700 if (arg == 0)
8701 return build_int_cst (integer_type_node, no_type_class);
8703 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8706 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8707 ARG. */
8709 static tree
8710 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8712 if (!validate_arg (arg, POINTER_TYPE))
8713 return NULL_TREE;
8714 else
8716 c_strlen_data lendata = { };
8717 tree len = c_strlen (arg, 0, &lendata);
8719 if (len)
8720 return fold_convert_loc (loc, type, len);
8722 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8723 also early enough to detect invalid reads in multidimensional
8724 arrays and struct members. */
8725 if (!lendata.decl)
8726 c_strlen (arg, 1, &lendata);
8728 if (lendata.decl)
8730 if (EXPR_HAS_LOCATION (arg))
8731 loc = EXPR_LOCATION (arg);
8732 else if (loc == UNKNOWN_LOCATION)
8733 loc = input_location;
8734 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8737 return NULL_TREE;
8741 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8743 static tree
8744 fold_builtin_inf (location_t loc, tree type, int warn)
8746 /* __builtin_inff is intended to be usable to define INFINITY on all
8747 targets. If an infinity is not available, INFINITY expands "to a
8748 positive constant of type float that overflows at translation
8749 time", footnote "In this case, using INFINITY will violate the
8750 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8751 Thus we pedwarn to ensure this constraint violation is
8752 diagnosed. */
8753 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8754 pedwarn (loc, 0, "target format does not support infinity");
8756 return build_real (type, dconstinf);
8759 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8760 NULL_TREE if no simplification can be made. */
8762 static tree
8763 fold_builtin_sincos (location_t loc,
8764 tree arg0, tree arg1, tree arg2)
8766 tree type;
8767 tree fndecl, call = NULL_TREE;
8769 if (!validate_arg (arg0, REAL_TYPE)
8770 || !validate_arg (arg1, POINTER_TYPE)
8771 || !validate_arg (arg2, POINTER_TYPE))
8772 return NULL_TREE;
8774 type = TREE_TYPE (arg0);
8776 /* Canonicalize sincos to cexpi: look up the cexpi builtin for this type. */
8777 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8778 if (fn == END_BUILTINS)
8779 return NULL_TREE;
8781 /* Calculate the result directly when the argument is a constant. */
8782 if (TREE_CODE (arg0) == REAL_CST)
8784 tree complex_type = build_complex_type (type);
8785 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8787 if (!call)
8789 if (!targetm.libc_has_function (function_c99_math_complex, type)
8790 || !builtin_decl_implicit_p (fn))
8791 return NULL_TREE;
8792 fndecl = builtin_decl_explicit (fn);
8793 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8794 call = builtin_save_expr (call);
8797 tree ptype = build_pointer_type (type);
8798 arg1 = fold_convert (ptype, arg1);
8799 arg2 = fold_convert (ptype, arg2);
8800 return build2 (COMPOUND_EXPR, void_type_node,
8801 build2 (MODIFY_EXPR, void_type_node,
8802 build_fold_indirect_ref_loc (loc, arg1),
8803 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8804 build2 (MODIFY_EXPR, void_type_node,
8805 build_fold_indirect_ref_loc (loc, arg2),
8806 fold_build1_loc (loc, REALPART_EXPR, type, call)));
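/* As a sketch of the rewrite above (hypothetical user code, not part of
   this file): a call

     sincos (x, &s, &c);

   becomes the equivalent of

     _Complex double t = __builtin_cexpi (x);   // exp(i*x) = cos x + i*sin x
     s = __imag__ t;                            // sine stored through arg1
     c = __real__ t;                            // cosine stored through arg2

   built as MODIFY_EXPRs chained by a COMPOUND_EXPR, with the cexpi call
   wrapped in a SAVE_EXPR so it is evaluated only once.  */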
8809 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8810 Return NULL_TREE if no simplification can be made. */
8812 static tree
8813 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8815 if (!validate_arg (arg1, POINTER_TYPE)
8816 || !validate_arg (arg2, POINTER_TYPE)
8817 || !validate_arg (len, INTEGER_TYPE))
8818 return NULL_TREE;
8820 /* If the LEN parameter is zero, return zero. */
8821 if (integer_zerop (len))
8822 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8823 arg1, arg2);
8825 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8826 if (operand_equal_p (arg1, arg2, 0))
8827 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8829 /* If the LEN parameter is one, return an expression corresponding to
8830 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8831 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8833 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8834 tree cst_uchar_ptr_node
8835 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8837 tree ind1
8838 = fold_convert_loc (loc, integer_type_node,
8839 build1 (INDIRECT_REF, cst_uchar_node,
8840 fold_convert_loc (loc,
8841 cst_uchar_ptr_node,
8842 arg1)));
8843 tree ind2
8844 = fold_convert_loc (loc, integer_type_node,
8845 build1 (INDIRECT_REF, cst_uchar_node,
8846 fold_convert_loc (loc,
8847 cst_uchar_ptr_node,
8848 arg2)));
8849 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8852 return NULL_TREE;
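/* Illustration of the cases above (a sketch, not a literal source-level
   rewrite): with a length of exactly one byte,

     memcmp (p, q, 1)

   folds to the byte difference

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   while a zero length, or p and q being the same expression, folds
   straight to 0, keeping the other operands only for their side
   effects.  */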
8855 /* Fold a call to builtin isascii with argument ARG. */
8857 static tree
8858 fold_builtin_isascii (location_t loc, tree arg)
8860 if (!validate_arg (arg, INTEGER_TYPE))
8861 return NULL_TREE;
8862 else
8864 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8865 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8866 build_int_cst (integer_type_node,
8867 ~ (unsigned HOST_WIDE_INT) 0x7f));
8868 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8869 arg, integer_zero_node);
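/* Standalone sketch of the folded form (illustrative only):

     int folded_isascii (int c)
     {
       return (c & ~0x7f) == 0;    // nonzero exactly for 0 <= c <= 127
     }

   Masking off the low seven bits and testing for zero checks both bounds
   at once, so no libc call or table lookup remains.  */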
8873 /* Fold a call to builtin toascii with argument ARG. */
8875 static tree
8876 fold_builtin_toascii (location_t loc, tree arg)
8878 if (!validate_arg (arg, INTEGER_TYPE))
8879 return NULL_TREE;
8881 /* Transform toascii(c) -> (c & 0x7f). */
8882 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8883 build_int_cst (integer_type_node, 0x7f));
8886 /* Fold a call to builtin isdigit with argument ARG. */
8888 static tree
8889 fold_builtin_isdigit (location_t loc, tree arg)
8891 if (!validate_arg (arg, INTEGER_TYPE))
8892 return NULL_TREE;
8893 else
8895 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8896 /* According to the C standard, isdigit is unaffected by locale.
8897 However, it definitely is affected by the target character set. */
8898 unsigned HOST_WIDE_INT target_digit0
8899 = lang_hooks.to_target_charset ('0');
8901 if (target_digit0 == 0)
8902 return NULL_TREE;
8904 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8905 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8906 build_int_cst (unsigned_type_node, target_digit0));
8907 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8908 build_int_cst (unsigned_type_node, 9));
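/* Illustrative sketch for an ASCII target, where '0' maps to 48:

     int folded_isdigit (int c)
     {
       return (unsigned) c - 48u <= 9u;
     }

   The unsigned subtraction wraps values below '0' around to very large
   numbers, so a single comparison covers both ends of the digit range.  */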
8912 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8914 static tree
8915 fold_builtin_fabs (location_t loc, tree arg, tree type)
8917 if (!validate_arg (arg, REAL_TYPE))
8918 return NULL_TREE;
8920 arg = fold_convert_loc (loc, type, arg);
8921 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8924 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8926 static tree
8927 fold_builtin_abs (location_t loc, tree arg, tree type)
8929 if (!validate_arg (arg, INTEGER_TYPE))
8930 return NULL_TREE;
8932 arg = fold_convert_loc (loc, type, arg);
8933 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8936 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8938 static tree
8939 fold_builtin_carg (location_t loc, tree arg, tree type)
8941 if (validate_arg (arg, COMPLEX_TYPE)
8942 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
8944 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8946 if (atan2_fn)
8948 tree new_arg = builtin_save_expr (arg);
8949 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8950 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8951 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8955 return NULL_TREE;
8958 /* Fold a call to builtin frexp, we can assume the base is 2. */
8960 static tree
8961 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8963 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8964 return NULL_TREE;
8966 STRIP_NOPS (arg0);
8968 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8969 return NULL_TREE;
8971 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8973 /* Proceed if a valid pointer type was passed in. */
8974 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8976 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8977 tree frac, exp, res;
8979 switch (value->cl)
8981 case rvc_zero:
8982 /* For +-0, return (*exp = 0, +-0). */
8983 exp = integer_zero_node;
8984 frac = arg0;
8985 break;
8986 case rvc_nan:
8987 case rvc_inf:
8988 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8989 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8990 case rvc_normal:
8992 /* Since the frexp function always expects base 2, and in
8993 GCC normalized significands are already in the range
8994 [0.5, 1.0), we have exactly what frexp wants. */
8995 REAL_VALUE_TYPE frac_rvt = *value;
8996 SET_REAL_EXP (&frac_rvt, 0);
8997 frac = build_real (rettype, frac_rvt);
8998 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9000 break;
9001 default:
9002 gcc_unreachable ();
9005 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9006 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9007 TREE_SIDE_EFFECTS (arg1) = 1;
9008 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9009 suppress_warning (res, OPT_Wunused_value);
9010 return res;
9013 return NULL_TREE;
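/* Example of the constant case handled above (hypothetical call):

     int e;
     double f = __builtin_frexp (8.0, &e);

   folds to f == 0.5 and e == 4, since 8.0 == 0.5 * 2**4 and GCC's
   normalized significands already lie in [0.5, 1.0).  The result is a
   COMPOUND_EXPR that stores the exponent first and then yields the
   fraction.  */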
9016 /* Fold a call to builtin modf. */
9018 static tree
9019 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9021 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9022 return NULL_TREE;
9024 STRIP_NOPS (arg0);
9026 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9027 return NULL_TREE;
9029 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9031 /* Proceed if a valid pointer type was passed in. */
9032 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9034 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9035 REAL_VALUE_TYPE trunc, frac;
9036 tree res;
9038 switch (value->cl)
9040 case rvc_nan:
9041 case rvc_zero:
9042 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9043 trunc = frac = *value;
9044 break;
9045 case rvc_inf:
9046 /* For +-Inf, return (*arg1 = arg0, +-0). */
9047 frac = dconst0;
9048 frac.sign = value->sign;
9049 trunc = *value;
9050 break;
9051 case rvc_normal:
9052 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9053 real_trunc (&trunc, VOIDmode, value);
9054 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9055 /* If the original number was negative and already
9056 integral, then the fractional part is -0.0. */
9057 if (value->sign && frac.cl == rvc_zero)
9058 frac.sign = value->sign;
9059 break;
9062 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9063 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9064 build_real (rettype, trunc));
9065 TREE_SIDE_EFFECTS (arg1) = 1;
9066 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9067 build_real (rettype, frac));
9068 suppress_warning (res, OPT_Wunused_value);
9069 return res;
9072 return NULL_TREE;
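/* Example of the constant case handled above (hypothetical call):

     double ip;
     double f = __builtin_modf (2.5, &ip);

   folds to ip == 2.0 and f == 0.5.  For a negative integral input such
   as -3.0 the fractional part comes out as -0.0, matching the sign
   handling above.  */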
9075 /* Given a location LOC, an interclass builtin function decl FNDECL
9076 and its single argument ARG, return a folded expression computing
9077 the same, or NULL_TREE if we either couldn't or didn't want to fold
9078 (the latter happens if there's an RTL instruction available). */
9080 static tree
9081 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9083 machine_mode mode;
9085 if (!validate_arg (arg, REAL_TYPE))
9086 return NULL_TREE;
9088 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9089 return NULL_TREE;
9091 mode = TYPE_MODE (TREE_TYPE (arg));
9093 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9095 /* If there is no optab, try generic code. */
9096 switch (DECL_FUNCTION_CODE (fndecl))
9098 tree result;
9100 CASE_FLT_FN (BUILT_IN_ISINF):
9102 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9103 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9104 tree type = TREE_TYPE (arg);
9105 REAL_VALUE_TYPE r;
9106 char buf[128];
9108 if (is_ibm_extended)
9110 /* NaN and Inf are encoded in the high-order double value
9111 only. The low-order value is not significant. */
9112 type = double_type_node;
9113 mode = DFmode;
9114 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9116 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9117 real_from_string (&r, buf);
9118 result = build_call_expr (isgr_fn, 2,
9119 fold_build1_loc (loc, ABS_EXPR, type, arg),
9120 build_real (type, r));
9121 return result;
9123 CASE_FLT_FN (BUILT_IN_FINITE):
9124 case BUILT_IN_ISFINITE:
9126 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9127 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9128 tree type = TREE_TYPE (arg);
9129 REAL_VALUE_TYPE r;
9130 char buf[128];
9132 if (is_ibm_extended)
9134 /* NaN and Inf are encoded in the high-order double value
9135 only. The low-order value is not significant. */
9136 type = double_type_node;
9137 mode = DFmode;
9138 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9140 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9141 real_from_string (&r, buf);
9142 result = build_call_expr (isle_fn, 2,
9143 fold_build1_loc (loc, ABS_EXPR, type, arg),
9144 build_real (type, r));
9145 /*result = fold_build2_loc (loc, UNGT_EXPR,
9146 TREE_TYPE (TREE_TYPE (fndecl)),
9147 fold_build1_loc (loc, ABS_EXPR, type, arg),
9148 build_real (type, r));
9149 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9150 TREE_TYPE (TREE_TYPE (fndecl)),
9151 result);*/
9152 return result;
9154 case BUILT_IN_ISNORMAL:
9156 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9157 islessequal(fabs(x),DBL_MAX). */
9158 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9159 tree type = TREE_TYPE (arg);
9160 tree orig_arg, max_exp, min_exp;
9161 machine_mode orig_mode = mode;
9162 REAL_VALUE_TYPE rmax, rmin;
9163 char buf[128];
9165 orig_arg = arg = builtin_save_expr (arg);
9166 if (is_ibm_extended)
9168 /* Use double to test the normal range of IBM extended
9169 precision. Emin for IBM extended precision is
9170 different to emin for IEEE double, being 53 higher
9171 since the low double exponent is at least 53 lower
9172 than the high double exponent. */
9173 type = double_type_node;
9174 mode = DFmode;
9175 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9177 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9179 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9180 real_from_string (&rmax, buf);
9181 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9182 real_from_string (&rmin, buf);
9183 max_exp = build_real (type, rmax);
9184 min_exp = build_real (type, rmin);
9186 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9187 if (is_ibm_extended)
9189 /* Testing the high end of the range is done just using
9190 the high double, using the same test as isfinite().
9191 For the subnormal end of the range we first test the
9192 high double, then if its magnitude is equal to the
9193 limit of 0x1p-969, we test whether the low double is
9194 non-zero and opposite sign to the high double. */
9195 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9196 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9197 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9198 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9199 arg, min_exp);
9200 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9201 complex_double_type_node, orig_arg);
9202 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9203 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9204 tree zero = build_real (type, dconst0);
9205 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9206 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9207 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9208 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9209 fold_build3 (COND_EXPR,
9210 integer_type_node,
9211 hilt, logt, lolt));
9212 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9213 eq_min, ok_lo);
9214 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9215 gt_min, eq_min);
9217 else
9219 tree const isge_fn
9220 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9221 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9223 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9224 max_exp, min_exp);
9225 return result;
9227 default:
9228 break;
9231 return NULL_TREE;
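/* Sketch of the generic lowerings built above when no machine pattern
   exists (DBL_MAX and DBL_MIN stand for the hex-float strings produced
   from the mode's format):

     isinf (x)     ==>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ==>  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ==>  isgreaterequal (fabs (x), DBL_MIN)
                        & islessequal (fabs (x), DBL_MAX)

   with the IBM extended-double case testing only the high-order double
   and patching up the subnormal boundary from the low-order part.  */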
9234 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9235 ARG is the argument for the call. */
9237 static tree
9238 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9242 if (!validate_arg (arg, REAL_TYPE))
9243 return NULL_TREE;
9245 switch (builtin_index)
9247 case BUILT_IN_ISINF:
9248 if (tree_expr_infinite_p (arg))
9249 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9250 if (!tree_expr_maybe_infinite_p (arg))
9251 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9252 return NULL_TREE;
9254 case BUILT_IN_ISINF_SIGN:
9256 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9257 /* In a boolean context, GCC will fold the inner COND_EXPR to
9258 1. So e.g. "if (isinf_sign(x))" would be folded to just
9259 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9260 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9261 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9262 tree tmp = NULL_TREE;
9264 arg = builtin_save_expr (arg);
9266 if (signbit_fn && isinf_fn)
9268 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9269 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9271 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9272 signbit_call, integer_zero_node);
9273 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9274 isinf_call, integer_zero_node);
9276 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9277 integer_minus_one_node, integer_one_node);
9278 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9279 isinf_call, tmp,
9280 integer_zero_node);
9283 return tmp;
9286 case BUILT_IN_ISFINITE:
9287 if (tree_expr_finite_p (arg))
9288 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9289 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9290 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9291 return NULL_TREE;
9293 case BUILT_IN_ISNAN:
9294 if (tree_expr_nan_p (arg))
9295 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9296 if (!tree_expr_maybe_nan_p (arg))
9297 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9300 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9301 if (is_ibm_extended)
9303 /* NaN and Inf are encoded in the high-order double value
9304 only. The low-order value is not significant. */
9305 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9308 arg = builtin_save_expr (arg);
9309 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9311 case BUILT_IN_ISSIGNALING:
9312 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9313 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9314 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9315 here, so there is some possibility of __builtin_issignaling working
9316 without -fsignaling-nans. Especially when -fno-signaling-nans is
9317 the default. */
9318 if (!tree_expr_maybe_nan_p (arg))
9319 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9320 return NULL_TREE;
9322 default:
9323 gcc_unreachable ();
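/* For instance, when the operand may be a NaN, isnan (x) is lowered to
   the unordered self-comparison

     __builtin_isunordered (x, x)        // true exactly for NaN

   and isinf_sign (x) becomes the nest

     isinf (x) ? (signbit (x) ? -1 : 1) : 0

   both sketches of the trees built above, with x wrapped in a SAVE_EXPR
   so it is evaluated only once.  */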
9327 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9328 This builtin will generate code to return the appropriate floating
9329 point classification depending on the value of the floating point
9330 number passed in. The possible return values must be supplied as
9331 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9332 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9333 one floating point argument which is "type generic". */
9335 static tree
9336 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9338 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9339 arg, type, res, tmp;
9340 machine_mode mode;
9341 REAL_VALUE_TYPE r;
9342 char buf[128];
9344 /* Verify the required arguments in the original call. */
9345 if (nargs != 6
9346 || !validate_arg (args[0], INTEGER_TYPE)
9347 || !validate_arg (args[1], INTEGER_TYPE)
9348 || !validate_arg (args[2], INTEGER_TYPE)
9349 || !validate_arg (args[3], INTEGER_TYPE)
9350 || !validate_arg (args[4], INTEGER_TYPE)
9351 || !validate_arg (args[5], REAL_TYPE))
9352 return NULL_TREE;
9354 fp_nan = args[0];
9355 fp_infinite = args[1];
9356 fp_normal = args[2];
9357 fp_subnormal = args[3];
9358 fp_zero = args[4];
9359 arg = args[5];
9360 type = TREE_TYPE (arg);
9361 mode = TYPE_MODE (type);
9362 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9364 /* fpclassify(x) ->
9365 isnan(x) ? FP_NAN :
9366 (fabs(x) == Inf ? FP_INFINITE :
9367 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9368 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9370 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9371 build_real (type, dconst0));
9372 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9373 tmp, fp_zero, fp_subnormal);
9375 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9376 real_from_string (&r, buf);
9377 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9378 arg, build_real (type, r));
9379 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9381 if (tree_expr_maybe_infinite_p (arg))
9383 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9384 build_real (type, dconstinf));
9385 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9386 fp_infinite, res);
9389 if (tree_expr_maybe_nan_p (arg))
9391 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9392 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9395 return res;
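/* Usage sketch (hypothetical caller): a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   turns into the COND_EXPR ladder shown in the comment above, with
   DBL_MIN spelled as the hex-float 0x1p-1022 when x is an IEEE double
   (the string built from the mode's emin).  */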
9398 /* Fold a call to an unordered comparison function such as
9399 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9400 being called and ARG0 and ARG1 are the arguments for the call.
9401 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9402 the opposite of the desired result. UNORDERED_CODE is used
9403 for modes that can hold NaNs and ORDERED_CODE is used for
9404 the rest. */
9406 static tree
9407 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9408 enum tree_code unordered_code,
9409 enum tree_code ordered_code)
9411 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9412 enum tree_code code;
9413 tree type0, type1;
9414 enum tree_code code0, code1;
9415 tree cmp_type = NULL_TREE;
9417 type0 = TREE_TYPE (arg0);
9418 type1 = TREE_TYPE (arg1);
9420 code0 = TREE_CODE (type0);
9421 code1 = TREE_CODE (type1);
9423 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9424 /* Choose the wider of two real types. */
9425 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9426 ? type0 : type1;
9427 else if (code0 == REAL_TYPE
9428 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
9429 cmp_type = type0;
9430 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
9431 && code1 == REAL_TYPE)
9432 cmp_type = type1;
9434 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9435 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9437 if (unordered_code == UNORDERED_EXPR)
9439 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9440 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9441 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9442 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9443 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9446 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9447 ? unordered_code : ordered_code;
9448 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9449 fold_build2_loc (loc, code, type, arg0, arg1));
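/* For instance, when either operand may be a NaN,

     isgreater (x, y)   ==>   !(x UNLE y)

   i.e. the negation of "unordered or less-or-equal", which is true
   exactly when both operands are ordered and x > y, and which, unlike
   a raw x > y, raises no exception on quiet NaNs (a sketch of the
   TRUTH_NOT_EXPR built above).  */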
9452 /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
9453 After choosing the wider floating-point type for the comparison,
9454 the code is folded to:
9455 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
9457 static tree
9458 fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
9460 tree type0, type1;
9461 enum tree_code code0, code1;
9462 tree cmp1, cmp2, cmp_type = NULL_TREE;
9464 type0 = TREE_TYPE (arg0);
9465 type1 = TREE_TYPE (arg1);
9467 code0 = TREE_CODE (type0);
9468 code1 = TREE_CODE (type1);
9470 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9471 /* Choose the wider of two real types. */
9472 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9473 ? type0 : type1;
9474 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9475 cmp_type = type0;
9476 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9477 cmp_type = type1;
9479 arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
9480 arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
9482 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
9483 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
9485 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
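/* Sketch of the result for floating-point operands:

     __builtin_iseqsig (x, y)
       ==>  SAVE_EXPR<x> >= SAVE_EXPR<y> && SAVE_EXPR<x> <= SAVE_EXPR<y>

   Two ordered comparisons are used instead of a plain equality because
   GE_EXPR and LE_EXPR both raise the "invalid" exception on a NaN
   operand, which is exactly the signaling behaviour iseqsig requires.  */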
9488 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9489 arithmetics if it can never overflow, or into internal functions that
9490 return both result of arithmetics and overflowed boolean flag in
9491 a complex integer result, or some other check for overflow.
9492 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9493 checking part of that. */
9495 static tree
9496 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9497 tree arg0, tree arg1, tree arg2)
9499 enum internal_fn ifn = IFN_LAST;
9500 /* The code of the expression corresponding to the built-in. */
9501 enum tree_code opcode = ERROR_MARK;
9502 bool ovf_only = false;
9504 switch (fcode)
9506 case BUILT_IN_ADD_OVERFLOW_P:
9507 ovf_only = true;
9508 /* FALLTHRU */
9509 case BUILT_IN_ADD_OVERFLOW:
9510 case BUILT_IN_SADD_OVERFLOW:
9511 case BUILT_IN_SADDL_OVERFLOW:
9512 case BUILT_IN_SADDLL_OVERFLOW:
9513 case BUILT_IN_UADD_OVERFLOW:
9514 case BUILT_IN_UADDL_OVERFLOW:
9515 case BUILT_IN_UADDLL_OVERFLOW:
9516 opcode = PLUS_EXPR;
9517 ifn = IFN_ADD_OVERFLOW;
9518 break;
9519 case BUILT_IN_SUB_OVERFLOW_P:
9520 ovf_only = true;
9521 /* FALLTHRU */
9522 case BUILT_IN_SUB_OVERFLOW:
9523 case BUILT_IN_SSUB_OVERFLOW:
9524 case BUILT_IN_SSUBL_OVERFLOW:
9525 case BUILT_IN_SSUBLL_OVERFLOW:
9526 case BUILT_IN_USUB_OVERFLOW:
9527 case BUILT_IN_USUBL_OVERFLOW:
9528 case BUILT_IN_USUBLL_OVERFLOW:
9529 opcode = MINUS_EXPR;
9530 ifn = IFN_SUB_OVERFLOW;
9531 break;
9532 case BUILT_IN_MUL_OVERFLOW_P:
9533 ovf_only = true;
9534 /* FALLTHRU */
9535 case BUILT_IN_MUL_OVERFLOW:
9536 case BUILT_IN_SMUL_OVERFLOW:
9537 case BUILT_IN_SMULL_OVERFLOW:
9538 case BUILT_IN_SMULLL_OVERFLOW:
9539 case BUILT_IN_UMUL_OVERFLOW:
9540 case BUILT_IN_UMULL_OVERFLOW:
9541 case BUILT_IN_UMULLL_OVERFLOW:
9542 opcode = MULT_EXPR;
9543 ifn = IFN_MUL_OVERFLOW;
9544 break;
9545 default:
9546 gcc_unreachable ();
9549 /* For the "generic" overloads, the first two arguments can have different
9550 types and the last argument determines the target type to use to check
9551 for overflow. The arguments of the other overloads all have the same
9552 type. */
9553 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9555 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9556 arguments are constant, attempt to fold the built-in call into a constant
9557 expression indicating whether or not it detected an overflow. */
9558 if (ovf_only
9559 && TREE_CODE (arg0) == INTEGER_CST
9560 && TREE_CODE (arg1) == INTEGER_CST)
9561 /* Perform the computation in the target type and check for overflow. */
9562 return omit_one_operand_loc (loc, boolean_type_node,
9563 arith_overflowed_p (opcode, type, arg0, arg1)
9564 ? boolean_true_node : boolean_false_node,
9565 arg2);
9567 tree intres, ovfres;
9568 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9570 intres = fold_binary_loc (loc, opcode, type,
9571 fold_convert_loc (loc, type, arg0),
9572 fold_convert_loc (loc, type, arg1));
9573 if (TREE_OVERFLOW (intres))
9574 intres = drop_tree_overflow (intres);
9575 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9576 ? boolean_true_node : boolean_false_node);
9578 else
9580 tree ctype = build_complex_type (type);
9581 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9582 arg0, arg1);
9583 tree tgt = save_expr (call);
9584 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9585 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9586 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9589 if (ovf_only)
9590 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9592 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9593 tree store
9594 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9595 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
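/* Illustration (hypothetical caller; internal-function syntax as it
   appears in gimple dumps):

     int r;
     bool ovf = __builtin_add_overflow (a, b, &r);

   folds, when the operands are not both constant, into roughly

     _Complex int t = .ADD_OVERFLOW (a, b);
     r = __real__ t;  ovf = (bool) __imag__ t;

   whereas __builtin_add_overflow_p with two INTEGER_CST operands
   collapses to the boolean constant computed by arith_overflowed_p.  */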
9598 /* Fold __builtin_{add,sub}c{,l,ll} into pair of internal functions
9599 that return both result of arithmetics and overflowed boolean
9600 flag in a complex integer result. */
9602 static tree
9603 fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
9604 tree *args)
9606 enum internal_fn ifn;
9608 switch (fcode)
9610 case BUILT_IN_ADDC:
9611 case BUILT_IN_ADDCL:
9612 case BUILT_IN_ADDCLL:
9613 ifn = IFN_ADD_OVERFLOW;
9614 break;
9615 case BUILT_IN_SUBC:
9616 case BUILT_IN_SUBCL:
9617 case BUILT_IN_SUBCLL:
9618 ifn = IFN_SUB_OVERFLOW;
9619 break;
9620 default:
9621 gcc_unreachable ();
9624 tree type = TREE_TYPE (args[0]);
9625 tree ctype = build_complex_type (type);
9626 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9627 args[0], args[1]);
9628 tree tgt = save_expr (call);
9629 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9630 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9631 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9632 intres, args[2]);
9633 tgt = save_expr (call);
9634 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9635 tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9636 ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
9637 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
9638 tree store
9639 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
9640 return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
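/* Sketch of the expansion for an unsigned type (gimple-dump syntax):

     __builtin_addc (a, b, carry_in, &carry_out)
       ==>  t1 = .ADD_OVERFLOW (a, b);
            t2 = .ADD_OVERFLOW (__real__ t1, carry_in);
            carry_out = __imag__ t1 | __imag__ t2;
            result    = __real__ t2;

   With a carry-in of 0 or 1 at most one of the two additions can wrap,
   so OR-ing the two overflow flags yields the carry out.  */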
9643 /* Fold a call to __builtin_FILE to a constant string. */
9645 static inline tree
9646 fold_builtin_FILE (location_t loc)
9648 if (const char *fname = LOCATION_FILE (loc))
9650 /* The documentation says this builtin is equivalent to the preprocessor
9651 __FILE__ macro so it appears appropriate to use the same file prefix
9652 mappings. */
9653 fname = remap_macro_filename (fname);
9654 return build_string_literal (fname);
9657 return build_string_literal ("");
9660 /* Fold a call to __builtin_FUNCTION to a constant string. */
9662 static inline tree
9663 fold_builtin_FUNCTION ()
9665 const char *name = "";
9667 if (current_function_decl)
9668 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9670 return build_string_literal (name);
9673 /* Fold a call to __builtin_LINE to an integer constant. */
9675 static inline tree
9676 fold_builtin_LINE (location_t loc, tree type)
9678 return build_int_cst (type, LOCATION_LINE (loc));
9681 /* Fold a call to built-in function FNDECL with 0 arguments.
9682 This function returns NULL_TREE if no simplification was possible. */
9684 static tree
9685 fold_builtin_0 (location_t loc, tree fndecl)
9687 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9688 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9689 switch (fcode)
9691 case BUILT_IN_FILE:
9692 return fold_builtin_FILE (loc);
9694 case BUILT_IN_FUNCTION:
9695 return fold_builtin_FUNCTION ();
9697 case BUILT_IN_LINE:
9698 return fold_builtin_LINE (loc, type);
9700 CASE_FLT_FN (BUILT_IN_INF):
9701 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9702 case BUILT_IN_INFD32:
9703 case BUILT_IN_INFD64:
9704 case BUILT_IN_INFD128:
9705 return fold_builtin_inf (loc, type, true);
9707 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9709 return fold_builtin_inf (loc, type, false);
9711 case BUILT_IN_CLASSIFY_TYPE:
9712 return fold_builtin_classify_type (NULL_TREE);
9714 case BUILT_IN_UNREACHABLE:
9715 /* Rewrite any explicit calls to __builtin_unreachable. */
9716 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
9717 return build_builtin_unreachable (loc);
9718 break;
9720 default:
9721 break;
9723 return NULL_TREE;
9726 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9727 This function returns NULL_TREE if no simplification was possible. */
9729 static tree
9730 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9732 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9733 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9735 if (TREE_CODE (arg0) == ERROR_MARK)
9736 return NULL_TREE;
9738 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9739 return ret;
9741 switch (fcode)
9743 case BUILT_IN_CONSTANT_P:
9745 tree val = fold_builtin_constant_p (arg0);
9747 /* Gimplification will pull the CALL_EXPR for the builtin out of
9748 an if condition. When not optimizing, we'll not CSE it back.
9749 To avoid regressions such as link errors, return false now. */
9750 if (!val && !optimize)
9751 val = integer_zero_node;
9753 return val;
9756 case BUILT_IN_CLASSIFY_TYPE:
9757 return fold_builtin_classify_type (arg0);
9759 case BUILT_IN_STRLEN:
9760 return fold_builtin_strlen (loc, expr, type, arg0);
9762 CASE_FLT_FN (BUILT_IN_FABS):
9763 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9764 case BUILT_IN_FABSD32:
9765 case BUILT_IN_FABSD64:
9766 case BUILT_IN_FABSD128:
9767 return fold_builtin_fabs (loc, arg0, type);
9769 case BUILT_IN_ABS:
9770 case BUILT_IN_LABS:
9771 case BUILT_IN_LLABS:
9772 case BUILT_IN_IMAXABS:
9773 return fold_builtin_abs (loc, arg0, type);
9775 CASE_FLT_FN (BUILT_IN_CONJ):
9776 if (validate_arg (arg0, COMPLEX_TYPE)
9777 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9778 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9779 break;
9781 CASE_FLT_FN (BUILT_IN_CREAL):
9782 if (validate_arg (arg0, COMPLEX_TYPE)
9783 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9784 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9785 break;
9787 CASE_FLT_FN (BUILT_IN_CIMAG):
9788 if (validate_arg (arg0, COMPLEX_TYPE)
9789 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9790 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9791 break;
9793 CASE_FLT_FN (BUILT_IN_CARG):
9794 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
9795 return fold_builtin_carg (loc, arg0, type);
9797 case BUILT_IN_ISASCII:
9798 return fold_builtin_isascii (loc, arg0);
9800 case BUILT_IN_TOASCII:
9801 return fold_builtin_toascii (loc, arg0);
9803 case BUILT_IN_ISDIGIT:
9804 return fold_builtin_isdigit (loc, arg0);
9806 CASE_FLT_FN (BUILT_IN_FINITE):
9807 case BUILT_IN_FINITED32:
9808 case BUILT_IN_FINITED64:
9809 case BUILT_IN_FINITED128:
9810 case BUILT_IN_ISFINITE:
9812 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9813 if (ret)
9814 return ret;
9815 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9818 CASE_FLT_FN (BUILT_IN_ISINF):
9819 case BUILT_IN_ISINFD32:
9820 case BUILT_IN_ISINFD64:
9821 case BUILT_IN_ISINFD128:
9823 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9824 if (ret)
9825 return ret;
9826 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9829 case BUILT_IN_ISNORMAL:
9830 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9832 case BUILT_IN_ISINF_SIGN:
9833 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9835 CASE_FLT_FN (BUILT_IN_ISNAN):
9836 case BUILT_IN_ISNAND32:
9837 case BUILT_IN_ISNAND64:
9838 case BUILT_IN_ISNAND128:
9839 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9841 case BUILT_IN_ISSIGNALING:
9842 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
9844 case BUILT_IN_FREE:
9845 if (integer_zerop (arg0))
9846 return build_empty_stmt (loc);
9847 break;
9849 default:
9850 break;
9853 return NULL_TREE;
9857 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9858 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9859 if no simplification was possible. */
9861 static tree
9862 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9865 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9867 if (TREE_CODE (arg0) == ERROR_MARK
9868 || TREE_CODE (arg1) == ERROR_MARK)
9869 return NULL_TREE;
9871 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9872 return ret;
9874 switch (fcode)
9876 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9877 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9878 if (validate_arg (arg0, REAL_TYPE)
9879 && validate_arg (arg1, POINTER_TYPE))
9880 return do_mpfr_lgamma_r (arg0, arg1, type);
9881 break;
9883 CASE_FLT_FN (BUILT_IN_FREXP):
9884 return fold_builtin_frexp (loc, arg0, arg1, type);
9886 CASE_FLT_FN (BUILT_IN_MODF):
9887 return fold_builtin_modf (loc, arg0, arg1, type);
9889 case BUILT_IN_STRSPN:
9890 return fold_builtin_strspn (loc, expr, arg0, arg1);
9892 case BUILT_IN_STRCSPN:
9893 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9895 case BUILT_IN_STRPBRK:
9896 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9898 case BUILT_IN_EXPECT:
9899 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9901 case BUILT_IN_ISGREATER:
9902 return fold_builtin_unordered_cmp (loc, fndecl,
9903 arg0, arg1, UNLE_EXPR, LE_EXPR);
9904 case BUILT_IN_ISGREATEREQUAL:
9905 return fold_builtin_unordered_cmp (loc, fndecl,
9906 arg0, arg1, UNLT_EXPR, LT_EXPR);
9907 case BUILT_IN_ISLESS:
9908 return fold_builtin_unordered_cmp (loc, fndecl,
9909 arg0, arg1, UNGE_EXPR, GE_EXPR);
9910 case BUILT_IN_ISLESSEQUAL:
9911 return fold_builtin_unordered_cmp (loc, fndecl,
9912 arg0, arg1, UNGT_EXPR, GT_EXPR);
9913 case BUILT_IN_ISLESSGREATER:
9914 return fold_builtin_unordered_cmp (loc, fndecl,
9915 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9916 case BUILT_IN_ISUNORDERED:
9917 return fold_builtin_unordered_cmp (loc, fndecl,
9918 arg0, arg1, UNORDERED_EXPR,
9919 NOP_EXPR);
9921 case BUILT_IN_ISEQSIG:
9922 return fold_builtin_iseqsig (loc, arg0, arg1);
9924 /* We do the folding for va_start in the expander. */
9925 case BUILT_IN_VA_START:
9926 break;
9928 case BUILT_IN_OBJECT_SIZE:
9929 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
9930 return fold_builtin_object_size (arg0, arg1, fcode);
9932 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9933 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9935 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9936 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9938 default:
9939 break;
9941 return NULL_TREE;
9944 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9945 and ARG2.
9946 This function returns NULL_TREE if no simplification was possible. */
9948 static tree
9949 fold_builtin_3 (location_t loc, tree fndecl,
9950 tree arg0, tree arg1, tree arg2)
9952 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9953 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9955 if (TREE_CODE (arg0) == ERROR_MARK
9956 || TREE_CODE (arg1) == ERROR_MARK
9957 || TREE_CODE (arg2) == ERROR_MARK)
9958 return NULL_TREE;
9960 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9961 arg0, arg1, arg2))
9962 return ret;
9964 switch (fcode)
9967 CASE_FLT_FN (BUILT_IN_SINCOS):
9968 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9970 CASE_FLT_FN (BUILT_IN_REMQUO):
9971 if (validate_arg (arg0, REAL_TYPE)
9972 && validate_arg (arg1, REAL_TYPE)
9973 && validate_arg (arg2, POINTER_TYPE))
9974 return do_mpfr_remquo (arg0, arg1, arg2);
9975 break;
9977 case BUILT_IN_MEMCMP:
9978 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9980 case BUILT_IN_EXPECT:
9981 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9983 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9984 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9986 case BUILT_IN_ADD_OVERFLOW:
9987 case BUILT_IN_SUB_OVERFLOW:
9988 case BUILT_IN_MUL_OVERFLOW:
9989 case BUILT_IN_ADD_OVERFLOW_P:
9990 case BUILT_IN_SUB_OVERFLOW_P:
9991 case BUILT_IN_MUL_OVERFLOW_P:
9992 case BUILT_IN_SADD_OVERFLOW:
9993 case BUILT_IN_SADDL_OVERFLOW:
9994 case BUILT_IN_SADDLL_OVERFLOW:
9995 case BUILT_IN_SSUB_OVERFLOW:
9996 case BUILT_IN_SSUBL_OVERFLOW:
9997 case BUILT_IN_SSUBLL_OVERFLOW:
9998 case BUILT_IN_SMUL_OVERFLOW:
9999 case BUILT_IN_SMULL_OVERFLOW:
10000 case BUILT_IN_SMULLL_OVERFLOW:
10001 case BUILT_IN_UADD_OVERFLOW:
10002 case BUILT_IN_UADDL_OVERFLOW:
10003 case BUILT_IN_UADDLL_OVERFLOW:
10004 case BUILT_IN_USUB_OVERFLOW:
10005 case BUILT_IN_USUBL_OVERFLOW:
10006 case BUILT_IN_USUBLL_OVERFLOW:
10007 case BUILT_IN_UMUL_OVERFLOW:
10008 case BUILT_IN_UMULL_OVERFLOW:
10009 case BUILT_IN_UMULLL_OVERFLOW:
10010 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10012 default:
10013 break;
10015 return NULL_TREE;
10018 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10019 ARGS is an array of NARGS arguments. IGNORE is true if the result
10020 of the function call is ignored. This function returns NULL_TREE
10021 if no simplification was possible. */
10023 static tree
10024 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10025 int nargs, bool)
10027 tree ret = NULL_TREE;
10029 switch (nargs)
10031 case 0:
10032 ret = fold_builtin_0 (loc, fndecl);
10033 break;
10034 case 1:
10035 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10036 break;
10037 case 2:
10038 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10039 break;
10040 case 3:
10041 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10042 break;
10043 default:
10044 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10045 break;
10047 if (ret)
10049 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10050 SET_EXPR_LOCATION (ret, loc);
10051 return ret;
10053 return NULL_TREE;
10056 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10057 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10058 of arguments in ARGS to be omitted. OLDNARGS is the number of
10059 elements in ARGS. */
10061 static tree
10062 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10063 int skip, tree fndecl, int n, va_list newargs)
10065 int nargs = oldnargs - skip + n;
10066 tree *buffer;
10068 if (n > 0)
10070 int i, j;
10072 buffer = XALLOCAVEC (tree, nargs);
10073 for (i = 0; i < n; i++)
10074 buffer[i] = va_arg (newargs, tree);
10075 for (j = skip; j < oldnargs; j++, i++)
10076 buffer[i] = args[j];
10078 else
10079 buffer = args + skip;
10081 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10084 /* Return true if FNDECL shouldn't be folded right now.
10085 If a built-in function has an inline attribute always_inline
10086 wrapper, defer folding it until after always_inline functions have
10087 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10088 might not be performed. */
10090 bool
10091 avoid_folding_inline_builtin (tree fndecl)
10093 return (DECL_DECLARED_INLINE_P (fndecl)
10094 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10095 && cfun
10096 && !cfun->always_inline_functions_inlined
10097 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10100 /* A wrapper function for builtin folding that prevents warnings for
10101 "statement without effect" and the like, caused by removing the
10102 call node earlier than the warning is generated. */
10104 tree
10105 fold_call_expr (location_t loc, tree exp, bool ignore)
10107 tree ret = NULL_TREE;
10108 tree fndecl = get_callee_fndecl (exp);
10109 if (fndecl && fndecl_built_in_p (fndecl)
10110 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10111 yet. Defer folding until we see all the arguments
10112 (after inlining). */
10113 && !CALL_EXPR_VA_ARG_PACK (exp))
10115 int nargs = call_expr_nargs (exp);
10117 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10118 instead last argument is __builtin_va_arg_pack (). Defer folding
10119 even in that case, until arguments are finalized. */
10120 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10122 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10123 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10124 return NULL_TREE;
10127 if (avoid_folding_inline_builtin (fndecl))
10128 return NULL_TREE;
10130 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10131 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10132 CALL_EXPR_ARGP (exp), ignore);
10133 else
10135 tree *args = CALL_EXPR_ARGP (exp);
10136 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10137 if (ret)
10138 return ret;
10141 return NULL_TREE;
10144 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10145 N arguments are passed in the array ARGARRAY. Return a folded
10146 expression or NULL_TREE if no simplification was possible. */
10148 tree
10149 fold_builtin_call_array (location_t loc, tree,
10150 tree fn,
10151 int n,
10152 tree *argarray)
10154 if (TREE_CODE (fn) != ADDR_EXPR)
10155 return NULL_TREE;
10157 tree fndecl = TREE_OPERAND (fn, 0);
10158 if (TREE_CODE (fndecl) == FUNCTION_DECL
10159 && fndecl_built_in_p (fndecl))
10161 /* If last argument is __builtin_va_arg_pack (), arguments to this
10162 function are not finalized yet. Defer folding until they are. */
10163 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10165 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10166 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10167 return NULL_TREE;
10169 if (avoid_folding_inline_builtin (fndecl))
10170 return NULL_TREE;
10171 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10172 return targetm.fold_builtin (fndecl, n, argarray, false);
10173 else
10174 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10177 return NULL_TREE;
10180 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10181 along with N new arguments specified as the "..." parameters. SKIP
10182 is the number of arguments in EXP to be omitted. This function is used
10183 to do varargs-to-varargs transformations. */
10185 static tree
10186 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10188 va_list ap;
10189 tree t;
10191 va_start (ap, n);
10192 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10193 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10194 va_end (ap);
10196 return t;
10199 /* Validate a single argument ARG against a tree code CODE representing
10200 a type. Return true when argument is valid. */
10202 static bool
10203 validate_arg (const_tree arg, enum tree_code code)
10205 if (!arg)
10206 return false;
10207 else if (code == POINTER_TYPE)
10208 return POINTER_TYPE_P (TREE_TYPE (arg));
10209 else if (code == INTEGER_TYPE)
10210 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10211 return code == TREE_CODE (TREE_TYPE (arg));
10214 /* This function validates the types of a function call argument list
10215 against a specified list of tree_codes. If the last specifier is a 0,
10216 that represents an ellipsis, otherwise the last specifier must be a
10217 VOID_TYPE.
10219 This is the GIMPLE version of validate_arglist. Eventually we want to
10220 completely convert builtins.cc to work from GIMPLEs and the tree based
10221 validate_arglist will then be removed. */
10223 bool
10224 validate_gimple_arglist (const gcall *call, ...)
10226 enum tree_code code;
10227 bool res = 0;
10228 va_list ap;
10229 const_tree arg;
10230 size_t i;
10232 va_start (ap, call);
10233 i = 0;
10237 code = (enum tree_code) va_arg (ap, int);
10238 switch (code)
10240 case 0:
10241 /* This signifies an ellipsis; any further arguments are all ok. */
10242 res = true;
10243 goto end;
10244 case VOID_TYPE:
10245 /* This signifies an endlink, if no arguments remain, return
10246 true, otherwise return false. */
10247 res = (i == gimple_call_num_args (call));
10248 goto end;
10249 default:
10250 /* If no parameters remain or the parameter's code does not
10251 match the specified code, return false. Otherwise continue
10252 checking any remaining arguments. */
10253 arg = gimple_call_arg (call, i++);
10254 if (!validate_arg (arg, code))
10255 goto end;
10256 break;
10259 while (1);
10261 /* We need gotos here since we can only have one VA_CLOSE in a
10262 function. */
10263 end: ;
10264 va_end (ap);
10266 return res;
10269 /* Default target-specific builtin expander that does nothing. */
10271 rtx
10272 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10273 rtx target ATTRIBUTE_UNUSED,
10274 rtx subtarget ATTRIBUTE_UNUSED,
10275 machine_mode mode ATTRIBUTE_UNUSED,
10276 int ignore ATTRIBUTE_UNUSED)
10278 return NULL_RTX;
10281 /* Returns true if EXP represents data that would potentially reside
10282 in a readonly section. */
10284 bool
10285 readonly_data_expr (tree exp)
10287 STRIP_NOPS (exp);
10289 if (TREE_CODE (exp) != ADDR_EXPR)
10290 return false;
10292 exp = get_base_address (TREE_OPERAND (exp, 0));
10293 if (!exp)
10294 return false;
10296 /* Make sure we call decl_readonly_section only for trees it
10297 can handle (since it returns true for everything it doesn't
10298 understand). */
10299 if (TREE_CODE (exp) == STRING_CST
10300 || TREE_CODE (exp) == CONSTRUCTOR
10301 || (VAR_P (exp) && TREE_STATIC (exp)))
10302 return decl_readonly_section (exp, 0);
10303 else
10304 return false;
10307 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10308 to the call, and TYPE is its return type.
10310 Return NULL_TREE if no simplification was possible, otherwise return the
10311 simplified form of the call as a tree.
10313 The simplified form may be a constant or other expression which
10314 computes the same value, but in a more efficient manner (including
10315 calls to other builtin functions).
10317 The call may contain arguments which need to be evaluated, but
10318 which are not useful to determine the result of the call. In
10319 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10320 COMPOUND_EXPR will be an argument which must be evaluated.
10321 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10322 COMPOUND_EXPR in the chain will contain the tree for the simplified
10323 form of the builtin function call. */
10325 static tree
10326 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
10328 if (!validate_arg (s1, POINTER_TYPE)
10329 || !validate_arg (s2, POINTER_TYPE))
10330 return NULL_TREE;
10332 tree fn;
10333 const char *p1, *p2;
10335 p2 = c_getstr (s2);
10336 if (p2 == NULL)
10337 return NULL_TREE;
10339 p1 = c_getstr (s1);
10340 if (p1 != NULL)
10342 const char *r = strpbrk (p1, p2);
10343 tree tem;
10345 if (r == NULL)
10346 return build_int_cst (TREE_TYPE (s1), 0);
10348 /* Return an offset into the constant string argument. */
10349 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10350 return fold_convert_loc (loc, type, tem);
10353 if (p2[0] == '\0')
10354 /* strpbrk(x, "") == NULL.
10355 Evaluate and ignore s1 in case it had side-effects. */
10356 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10358 if (p2[1] != '\0')
10359 return NULL_TREE; /* Really call strpbrk. */
10361 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10362 if (!fn)
10363 return NULL_TREE;
10365 /* New argument list transforming strpbrk(s1, s2) to
10366 strchr(s1, s2[0]). */
10367 return build_call_expr_loc (loc, fn, 2, s1,
10368 build_int_cst (integer_type_node, p2[0]));
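/* Examples of the three simplifications above (hypothetical calls):

     strpbrk (s, "")        ==>  (char *) 0, evaluating s only for side effects
     strpbrk (s, "/")       ==>  strchr (s, '/')
     strpbrk ("a/b", "/")   ==>  a pointer one byte into the constant string

   A non-constant or multi-character search set is left for the library
   call.  */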
10371 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10372 to the call.
10374 Return NULL_TREE if no simplification was possible, otherwise return the
10375 simplified form of the call as a tree.
10377 The simplified form may be a constant or other expression which
10378 computes the same value, but in a more efficient manner (including
10379 calls to other builtin functions).
10381 The call may contain arguments which need to be evaluated, but
10382 which are not useful to determine the result of the call. In
10383 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10384 COMPOUND_EXPR will be an argument which must be evaluated.
10385 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10386 COMPOUND_EXPR in the chain will contain the tree for the simplified
10387 form of the builtin function call. */
10389 static tree
10390 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10392 if (!validate_arg (s1, POINTER_TYPE)
10393 || !validate_arg (s2, POINTER_TYPE))
10394 return NULL_TREE;
10396 if (!check_nul_terminated_array (expr, s1)
10397 || !check_nul_terminated_array (expr, s2))
10398 return NULL_TREE;
10400 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10402 /* If either argument is "", return NULL_TREE. */
10403 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10404 /* Evaluate and ignore both arguments in case either one has
10405 side-effects. */
10406 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10407 s1, s2);
10408 return NULL_TREE;
10411 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10412 to the call.
10414 Return NULL_TREE if no simplification was possible, otherwise return the
10415 simplified form of the call as a tree.
10417 The simplified form may be a constant or other expression which
10418 computes the same value, but in a more efficient manner (including
10419 calls to other builtin functions).
10421 The call may contain arguments which need to be evaluated, but
10422 which are not useful to determine the result of the call. In
10423 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10424 COMPOUND_EXPR will be an argument which must be evaluated.
10425 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10426 COMPOUND_EXPR in the chain will contain the tree for the simplified
10427 form of the builtin function call. */
10429 static tree
10430 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10432 if (!validate_arg (s1, POINTER_TYPE)
10433 || !validate_arg (s2, POINTER_TYPE))
10434 return NULL_TREE;
10436 if (!check_nul_terminated_array (expr, s1)
10437 || !check_nul_terminated_array (expr, s2))
10438 return NULL_TREE;
10440 /* If the first argument is "", return NULL_TREE. */
10441 const char *p1 = c_getstr (s1);
10442 if (p1 && *p1 == '\0')
10444 /* Evaluate and ignore argument s2 in case it has
10445 side-effects. */
10446 return omit_one_operand_loc (loc, size_type_node,
10447 size_zero_node, s2);
10450 /* If the second argument is "", return __builtin_strlen(s1). */
10451 const char *p2 = c_getstr (s2);
10452 if (p2 && *p2 == '\0')
10454 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10456 /* If the replacement _DECL isn't initialized, don't do the
10457 transformation. */
10458 if (!fn)
10459 return NULL_TREE;
10461 return build_call_expr_loc (loc, fn, 1, s1);
10463 return NULL_TREE;
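/* Examples of the two simplifications above (hypothetical calls):

     strcspn ("", s2)   ==>  0, evaluating s2 only for side effects
     strcspn (s1, "")   ==>  strlen (s1)

   Any other combination is left as a real strcspn call.  */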
10466 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10467 produced. False otherwise. This is done so that we don't output the error
10468 or warning twice or three times. */
10470 bool
10471 fold_builtin_next_arg (tree exp, bool va_start_p)
10473 tree fntype = TREE_TYPE (current_function_decl);
10474 int nargs = call_expr_nargs (exp);
10475 tree arg;
10476 /* There is a good chance the current input_location points inside the
10477 definition of the va_start macro (perhaps on the token for
10478 builtin) in a system header, so warnings will not be emitted.
10479 Use the location in real source code. */
10480 location_t current_location =
10481 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10482 NULL);
10484 if (!stdarg_p (fntype))
10486 error ("%<va_start%> used in function with fixed arguments");
10487 return true;
10490 if (va_start_p)
10492 if (va_start_p && (nargs != 2))
10494 error ("wrong number of arguments to function %<va_start%>");
10495 return true;
10497 arg = CALL_EXPR_ARG (exp, 1);
10499 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10500 when we checked the arguments and if needed issued a warning. */
10501 else
10503 if (nargs == 0)
10505 /* Evidently an out of date version of <stdarg.h>; can't validate
10506 va_start's second argument, but can still work as intended. */
10507 warning_at (current_location,
10508 OPT_Wvarargs,
10509 "%<__builtin_next_arg%> called without an argument");
10510 return true;
10512 else if (nargs > 1)
10514 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10515 return true;
10517 arg = CALL_EXPR_ARG (exp, 0);
10520 if (TREE_CODE (arg) == SSA_NAME
10521 && SSA_NAME_VAR (arg))
10522 arg = SSA_NAME_VAR (arg);
10524 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10525 or __builtin_next_arg (0) the first time we see it, after checking
10526 the arguments and if needed issuing a warning. */
10527 if (!integer_zerop (arg))
10529 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10531 /* Strip off all nops for the sake of the comparison. This
10532 is not quite the same as STRIP_NOPS. It does more.
10533 We must also strip off INDIRECT_EXPR for C++ reference
10534 parameters. */
10535 while (CONVERT_EXPR_P (arg)
10536 || INDIRECT_REF_P (arg))
10537 arg = TREE_OPERAND (arg, 0);
10538 if (arg != last_parm)
10540 /* FIXME: Sometimes with the tree optimizers we can end up with
10541 something other than the last argument even though the user
10542 used the last argument. We just warn and set the arg to be the
10543 last argument so that we will get wrong code because of
10544 it. */
10545 warning_at (current_location,
10546 OPT_Wvarargs,
10547 "second parameter of %<va_start%> not last named argument");
10550 /* Undefined by C99 7.15.1.4p4 (va_start):
10551 "If the parameter parmN is declared with the register storage
10552 class, with a function or array type, or with a type that is
10553 not compatible with the type that results after application of
10554 the default argument promotions, the behavior is undefined."
10556 else if (DECL_REGISTER (arg))
10558 warning_at (current_location,
10559 OPT_Wvarargs,
10560 "undefined behavior when second parameter of "
10561 "%<va_start%> is declared with %<register%> storage");
10564 /* We want to verify the second parameter just once before the tree
10565 optimizers are run and then avoid keeping it in the tree,
10566 as otherwise we could warn even for correct code like:
10567 void foo (int i, ...)
10568 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10569 if (va_start_p)
10570 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10571 else
10572 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10574 return false;
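/* An illustrative sketch, not part of the original sources: user code that
   reaches the checks above with va_start_p set and triggers the -Wvarargs
   diagnostics.  The function names are made up for the example.

     #include <stdarg.h>

     void bad_parm (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter of 'va_start'
       va_end (ap);        // not last named argument
     }

     void bad_register (int a, register int b, ...)
     {
       va_list ap;
       va_start (ap, b);   // warning: undefined behavior when second
       va_end (ap);        // parameter is declared with 'register' storage
     }
*/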
10578 /* Expand a call EXP to __builtin_object_size. */
10580 static rtx
10581 expand_builtin_object_size (tree exp)
10583 tree ost;
10584 int object_size_type;
10585 tree fndecl = get_callee_fndecl (exp);
10587 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10589 error ("first argument of %qD must be a pointer, second integer constant",
10590 fndecl);
10591 expand_builtin_trap ();
10592 return const0_rtx;
10595 ost = CALL_EXPR_ARG (exp, 1);
10596 STRIP_NOPS (ost);
10598 if (TREE_CODE (ost) != INTEGER_CST
10599 || tree_int_cst_sgn (ost) < 0
10600 || compare_tree_int (ost, 3) > 0)
10602 error ("last argument of %qD is not integer constant between 0 and 3",
10603 fndecl);
10604 expand_builtin_trap ();
10605 return const0_rtx;
10608 object_size_type = tree_to_shwi (ost);
10610 return object_size_type < 2 ? constm1_rtx : const0_rtx;
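/* A minimal usage sketch, not from the original sources: when the size of
   the pointed-to object could not be determined by earlier passes, the
   expansion above yields the documented "unknown" results, (size_t) -1 for
   types 0 and 1 and (size_t) 0 for types 2 and 3.

     #include <stddef.h>

     size_t
     unknown_sizes (char *p)
     {
       size_t a = __builtin_object_size (p, 0);   // expands to (size_t) -1
       size_t b = __builtin_object_size (p, 2);   // expands to (size_t) 0
       return a + b;
     }
*/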
10613 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10614 FCODE is the BUILT_IN_* to use.
10615 Return NULL_RTX if we failed; the caller should emit a normal call,
10616 otherwise try to get the result in TARGET, if convenient (and in
10617 mode MODE if that's convenient). */
10619 static rtx
10620 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10621 enum built_in_function fcode)
10623 if (!validate_arglist (exp,
10624 POINTER_TYPE,
10625 fcode == BUILT_IN_MEMSET_CHK
10626 ? INTEGER_TYPE : POINTER_TYPE,
10627 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10628 return NULL_RTX;
10630 tree dest = CALL_EXPR_ARG (exp, 0);
10631 tree src = CALL_EXPR_ARG (exp, 1);
10632 tree len = CALL_EXPR_ARG (exp, 2);
10633 tree size = CALL_EXPR_ARG (exp, 3);
10635 /* FIXME: Set access mode to write only for memset et al. */
10636 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10637 /*srcstr=*/NULL_TREE, size, access_read_write);
10639 if (!tree_fits_uhwi_p (size))
10640 return NULL_RTX;
10642 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10644 /* Avoid transforming the checking call to an ordinary one when
10645 an overflow has been detected or when the call couldn't be
10646 validated because the size is not constant. */
10647 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10648 return NULL_RTX;
10650 tree fn = NULL_TREE;
10651 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10652 mem{cpy,pcpy,move,set} is available. */
10653 switch (fcode)
10655 case BUILT_IN_MEMCPY_CHK:
10656 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10657 break;
10658 case BUILT_IN_MEMPCPY_CHK:
10659 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10660 break;
10661 case BUILT_IN_MEMMOVE_CHK:
10662 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10663 break;
10664 case BUILT_IN_MEMSET_CHK:
10665 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10666 break;
10667 default:
10668 break;
10671 if (! fn)
10672 return NULL_RTX;
10674 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10675 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10676 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10677 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10679 else if (fcode == BUILT_IN_MEMSET_CHK)
10680 return NULL_RTX;
10681 else
10683 unsigned int dest_align = get_pointer_alignment (dest);
10685 /* If DEST is not a pointer type, call the normal function. */
10686 if (dest_align == 0)
10687 return NULL_RTX;
10689 /* If SRC and DEST are the same (and not volatile), do nothing. */
10690 if (operand_equal_p (src, dest, 0))
10692 tree expr;
10694 if (fcode != BUILT_IN_MEMPCPY_CHK)
10696 /* Evaluate and ignore LEN in case it has side-effects. */
10697 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10698 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10701 expr = fold_build_pointer_plus (dest, len);
10702 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10705 /* __memmove_chk special case. */
10706 if (fcode == BUILT_IN_MEMMOVE_CHK)
10708 unsigned int src_align = get_pointer_alignment (src);
10710 if (src_align == 0)
10711 return NULL_RTX;
10713 /* If src is categorized for a readonly section we can use
10714 normal __memcpy_chk. */
10715 if (readonly_data_expr (src))
10717 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10718 if (!fn)
10719 return NULL_RTX;
10720 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10721 dest, src, len, size);
10722 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10723 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10724 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10727 return NULL_RTX;
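/* An illustrative sketch, not part of the original sources: when the length
   is a known constant that fits in the known destination size, the checking
   builtin is expanded as the ordinary call built above.

     char buf[16];

     void
     ok_copy (const char *src)
     {
       // 8 <= 16, so this expands like a plain memcpy (buf, src, 8).
       __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
     }
*/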
10731 /* Emit warning if a buffer overflow is detected at compile time. */
10733 static void
10734 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10736 /* The source string. */
10737 tree srcstr = NULL_TREE;
10738 /* The size of the destination object returned by __builtin_object_size. */
10739 tree objsize = NULL_TREE;
10740 /* The string to which the source is being appended (as in
10741 __strcat_chk), or null if the call doesn't concatenate. */
10742 tree catstr = NULL_TREE;
10743 /* The maximum length of the source sequence in a bounded operation
10744 (such as __strncat_chk) or null if the operation isn't bounded
10745 (such as __strcat_chk). */
10746 tree maxread = NULL_TREE;
10747 /* The exact size of the access (such as in __strncpy_chk). */
10748 tree size = NULL_TREE;
10749 /* The access by the function that's checked. Except for snprintf
10750 both writing and reading are checked. */
10751 access_mode mode = access_read_write;
10753 switch (fcode)
10755 case BUILT_IN_STRCPY_CHK:
10756 case BUILT_IN_STPCPY_CHK:
10757 srcstr = CALL_EXPR_ARG (exp, 1);
10758 objsize = CALL_EXPR_ARG (exp, 2);
10759 break;
10761 case BUILT_IN_STRCAT_CHK:
10762 /* For __strcat_chk the warning will be emitted only if overflowing
10763 by at least strlen (dest) + 1 bytes. */
10764 catstr = CALL_EXPR_ARG (exp, 0);
10765 srcstr = CALL_EXPR_ARG (exp, 1);
10766 objsize = CALL_EXPR_ARG (exp, 2);
10767 break;
10769 case BUILT_IN_STRNCAT_CHK:
10770 catstr = CALL_EXPR_ARG (exp, 0);
10771 srcstr = CALL_EXPR_ARG (exp, 1);
10772 maxread = CALL_EXPR_ARG (exp, 2);
10773 objsize = CALL_EXPR_ARG (exp, 3);
10774 break;
10776 case BUILT_IN_STRNCPY_CHK:
10777 case BUILT_IN_STPNCPY_CHK:
10778 srcstr = CALL_EXPR_ARG (exp, 1);
10779 size = CALL_EXPR_ARG (exp, 2);
10780 objsize = CALL_EXPR_ARG (exp, 3);
10781 break;
10783 case BUILT_IN_SNPRINTF_CHK:
10784 case BUILT_IN_VSNPRINTF_CHK:
10785 maxread = CALL_EXPR_ARG (exp, 1);
10786 objsize = CALL_EXPR_ARG (exp, 3);
10787 /* The only checked access is the write to the destination. */
10788 mode = access_write_only;
10789 break;
10790 default:
10791 gcc_unreachable ();
10794 if (catstr && maxread)
10796 /* Check __strncat_chk. There is no way to determine the length
10797 of the string to which the source string is being appended so
10798 just warn when the length of the source string is not known. */
10799 check_strncat_sizes (exp, objsize);
10800 return;
10803 check_access (exp, size, maxread, srcstr, objsize, mode);
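/* An illustrative sketch, not from the original sources: a fortified call
   whose source is known to be longer than the destination object, which the
   check_access call above diagnoses at compile time.

     char dst[4];

     void
     overflow (void)
     {
       // The source needs 12 bytes but the destination object is 4 bytes,
       // so a buffer-overflow warning is emitted.
       __builtin___strcpy_chk (dst, "hello world",
                               __builtin_object_size (dst, 0));
     }
*/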
10806 /* Emit warning if a buffer overflow is detected at compile time
10807 in __sprintf_chk/__vsprintf_chk calls. */
10809 static void
10810 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10812 tree size, len, fmt;
10813 const char *fmt_str;
10814 int nargs = call_expr_nargs (exp);
10816 /* Verify the required arguments in the original call. */
10818 if (nargs < 4)
10819 return;
10820 size = CALL_EXPR_ARG (exp, 2);
10821 fmt = CALL_EXPR_ARG (exp, 3);
10823 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10824 return;
10826 /* Check whether the format is a literal string constant. */
10827 fmt_str = c_getstr (fmt);
10828 if (fmt_str == NULL)
10829 return;
10831 if (!init_target_chars ())
10832 return;
10834 /* If the format doesn't contain % args or %%, we know its size. */
10835 if (strchr (fmt_str, target_percent) == 0)
10836 len = build_int_cstu (size_type_node, strlen (fmt_str));
10837 /* If the format is "%s" and the first ... argument is a string literal,
10838 we know it too. */
10839 else if (fcode == BUILT_IN_SPRINTF_CHK
10840 && strcmp (fmt_str, target_percent_s) == 0)
10842 tree arg;
10844 if (nargs < 5)
10845 return;
10846 arg = CALL_EXPR_ARG (exp, 4);
10847 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10848 return;
10850 len = c_strlen (arg, 1);
10851 if (!len || ! tree_fits_uhwi_p (len))
10852 return;
10854 else
10855 return;
10857 /* Add one for the terminating nul. */
10858 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10860 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10861 access_write_only);
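/* An illustrative sketch, not part of the original sources: with a literal
   format that contains no '%', the output length is known exactly (strlen of
   the format plus the terminating nul) and is checked against the object
   size as above.

     char dst[4];

     void
     overflow (void)
     {
       // Writes 10 bytes into a 4-byte object, so a warning is emitted.
       __builtin___sprintf_chk (dst, 0, __builtin_object_size (dst, 0),
                                "too large");
     }
*/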
10864 /* Fold a call to __builtin_object_size or __builtin_dynamic_object_size
10865 (per FCODE) with arguments PTR and OST, if possible. */
10867 static tree
10868 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10870 tree bytes;
10871 int object_size_type;
10873 if (!validate_arg (ptr, POINTER_TYPE)
10874 || !validate_arg (ost, INTEGER_TYPE))
10875 return NULL_TREE;
10877 STRIP_NOPS (ost);
10879 if (TREE_CODE (ost) != INTEGER_CST
10880 || tree_int_cst_sgn (ost) < 0
10881 || compare_tree_int (ost, 3) > 0)
10882 return NULL_TREE;
10884 object_size_type = tree_to_shwi (ost);
10886 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10887 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10888 and (size_t) 0 for types 2 and 3. */
10889 if (TREE_SIDE_EFFECTS (ptr))
10890 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10892 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10893 object_size_type |= OST_DYNAMIC;
10895 if (TREE_CODE (ptr) == ADDR_EXPR)
10897 compute_builtin_object_size (ptr, object_size_type, &bytes);
10898 if ((object_size_type & OST_DYNAMIC)
10899 || int_fits_type_p (bytes, size_type_node))
10900 return fold_convert (size_type_node, bytes);
10902 else if (TREE_CODE (ptr) == SSA_NAME)
10904 /* If the object size is not known yet, delay folding until
10905 later. Maybe subsequent passes will help determine
10906 it. */
10907 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10908 && ((object_size_type & OST_DYNAMIC)
10909 || int_fits_type_p (bytes, size_type_node)))
10910 return fold_convert (size_type_node, bytes);
10913 return NULL_TREE;
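/* An illustrative sketch, not from the original sources: for a pointer that
   is the address of a known object, the call folds to a constant via the
   ADDR_EXPR path above.

     #include <stddef.h>

     char buf[32];

     size_t
     known (void)
     {
       return __builtin_object_size (&buf[8], 0);   // folds to 24
     }
*/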
10916 /* Builtins with folding operations that operate on "..." arguments
10917 need special handling; we need to store the arguments in a convenient
10918 data structure before attempting any folding. Fortunately there are
10919 only a few builtins that fall into this category. FNDECL is the
10920 function, and ARGS/NARGS are the arguments of the call. */
10922 static tree
10923 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10925 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10926 tree ret = NULL_TREE;
10928 switch (fcode)
10930 case BUILT_IN_FPCLASSIFY:
10931 ret = fold_builtin_fpclassify (loc, args, nargs);
10932 break;
10934 case BUILT_IN_ADDC:
10935 case BUILT_IN_ADDCL:
10936 case BUILT_IN_ADDCLL:
10937 case BUILT_IN_SUBC:
10938 case BUILT_IN_SUBCL:
10939 case BUILT_IN_SUBCLL:
10940 return fold_builtin_addc_subc (loc, fcode, args);
10942 default:
10943 break;
10945 if (ret)
10947 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10948 SET_EXPR_LOCATION (ret, loc);
10949 suppress_warning (ret);
10950 return ret;
10952 return NULL_TREE;
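/* An illustrative sketch, not part of the original sources: when every
   argument is a constant, fold_builtin_addc_subc can fold the carrying
   addition at compile time.

     unsigned int carry;
     unsigned int sum = __builtin_addc (0xffffffffu, 1u, 0u, &carry);
     // sum folds to 0 and carry to 1.
*/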
10955 /* Initialize format string characters in the target charset. */
10957 bool
10958 init_target_chars (void)
10960 static bool init;
10961 if (!init)
10963 target_newline = lang_hooks.to_target_charset ('\n');
10964 target_percent = lang_hooks.to_target_charset ('%');
10965 target_c = lang_hooks.to_target_charset ('c');
10966 target_s = lang_hooks.to_target_charset ('s');
10967 if (target_newline == 0 || target_percent == 0 || target_c == 0
10968 || target_s == 0)
10969 return false;
10971 target_percent_c[0] = target_percent;
10972 target_percent_c[1] = target_c;
10973 target_percent_c[2] = '\0';
10975 target_percent_s[0] = target_percent;
10976 target_percent_s[1] = target_s;
10977 target_percent_s[2] = '\0';
10979 target_percent_s_newline[0] = target_percent;
10980 target_percent_s_newline[1] = target_s;
10981 target_percent_s_newline[2] = target_newline;
10982 target_percent_s_newline[3] = '\0';
10984 init = true;
10986 return true;
10989 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10990 and no overflow/underflow occurred. INEXACT is true if M was not
10991 exactly calculated. TYPE is the tree type for the result. This
10992 function assumes that you cleared the MPFR flags and then
10993 calculated M to see if anything subsequently set a flag prior to
10994 entering this function. Return NULL_TREE if any checks fail. */
10996 static tree
10997 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10999 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11000 overflow/underflow occurred. If -frounding-math, proceed iff the
11001 result of calling FUNC was exact. */
11002 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11003 && (!flag_rounding_math || !inexact))
11005 REAL_VALUE_TYPE rr;
11007 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11008 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11009 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11010 but the mpfr_t is not, then we underflowed in the
11011 conversion. */
11012 if (real_isfinite (&rr)
11013 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11015 REAL_VALUE_TYPE rmode;
11017 real_convert (&rmode, TYPE_MODE (type), &rr);
11018 /* Proceed iff the specified mode can hold the value. */
11019 if (real_identical (&rmode, &rr))
11020 return build_real (type, rmode);
11023 return NULL_TREE;
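/* A sketch of the typical caller pattern, mirroring the helpers below and
   not itself part of the original sources (prec, ra, rnd and type stand for
   the values the caller computes):

     mpfr_t m;
     mpfr_inits2 (prec, m, NULL);
     mpfr_from_real (m, ra, MPFR_RNDN);
     mpfr_clear_flags ();
     int inexact = mpfr_sin (m, m, rnd);          // any MPFR computation
     tree res = do_mpfr_ckconv (m, type, inexact);
     mpfr_clears (m, NULL);
*/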
11026 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11027 number and no overflow/underflow occurred. INEXACT is true if M
11028 was not exactly calculated. TYPE is the tree type for the result.
11029 This function assumes that you cleared the MPFR flags and then
11030 calculated M to see if anything subsequently set a flag prior to
11031 entering this function. Return NULL_TREE if any checks fail; if
11032 FORCE_CONVERT is true, bypass the checks. */
11034 static tree
11035 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11037 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11038 overflow/underflow occurred. If -frounding-math, proceed iff the
11039 result of calling FUNC was exact. */
11040 if (force_convert
11041 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11042 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11043 && (!flag_rounding_math || !inexact)))
11045 REAL_VALUE_TYPE re, im;
11047 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11048 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11049 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11050 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11051 but the mpfr_t is not, then we underflowed in the
11052 conversion. */
11053 if (force_convert
11054 || (real_isfinite (&re) && real_isfinite (&im)
11055 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11056 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11058 REAL_VALUE_TYPE re_mode, im_mode;
11060 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11061 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11062 /* Proceed iff the specified mode can hold the value. */
11063 if (force_convert
11064 || (real_identical (&re_mode, &re)
11065 && real_identical (&im_mode, &im)))
11066 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11067 build_real (TREE_TYPE (type), im_mode));
11070 return NULL_TREE;
11073 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11074 the pointer *(ARG_QUO) and return the result. The type is taken
11075 from the type of ARG0 and is used for setting the precision of the
11076 calculation and results. */
11078 static tree
11079 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11081 tree const type = TREE_TYPE (arg0);
11082 tree result = NULL_TREE;
11084 STRIP_NOPS (arg0);
11085 STRIP_NOPS (arg1);
11087 /* To proceed, MPFR must exactly represent the target floating point
11088 format, which only happens when the target base equals two. */
11089 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11090 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11091 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11093 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11094 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11096 if (real_isfinite (ra0) && real_isfinite (ra1))
11098 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11099 const int prec = fmt->p;
11100 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11101 tree result_rem;
11102 long integer_quo;
11103 mpfr_t m0, m1;
11105 mpfr_inits2 (prec, m0, m1, NULL);
11106 mpfr_from_real (m0, ra0, MPFR_RNDN);
11107 mpfr_from_real (m1, ra1, MPFR_RNDN);
11108 mpfr_clear_flags ();
11109 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11110 /* Remquo is independent of the rounding mode, so pass
11111 inexact=0 to do_mpfr_ckconv(). */
11112 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11113 mpfr_clears (m0, m1, NULL);
11114 if (result_rem)
11116 /* MPFR calculates quo in the host's long so it may
11117 return more bits in quo than the target int can hold
11118 if sizeof(host long) > sizeof(target int). This can
11119 happen even for native compilers in LP64 mode. In
11120 these cases, reduce the quo value modulo the largest
11121 number that the target int can hold while leaving one
11122 bit for the sign. */
11123 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11124 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11126 /* Dereference the quo pointer argument. */
11127 arg_quo = build_fold_indirect_ref (arg_quo);
11128 /* Proceed iff a valid pointer type was passed in. */
11129 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11131 /* Set the value. */
11132 tree result_quo
11133 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11134 build_int_cst (TREE_TYPE (arg_quo),
11135 integer_quo));
11136 TREE_SIDE_EFFECTS (result_quo) = 1;
11137 /* Combine the quo assignment with the rem. */
11138 result = fold_build2 (COMPOUND_EXPR, type,
11139 result_quo, result_rem);
11140 suppress_warning (result, OPT_Wunused_value);
11141 result = non_lvalue (result);
11146 return result;
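/* An illustrative sketch, not from the original sources: with constant
   arguments the remquo call folds to the remainder while the quotient
   assignment is kept via the COMPOUND_EXPR built above.

     int q;
     double r = __builtin_remquo (5.0, 2.0, &q);   // r folds to 1.0, q gets 2
*/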
11149 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11150 resulting value as a tree with type TYPE. The mpfr precision is
11151 set to the precision of TYPE. We assume that this mpfr function
11152 returns zero if the result could be calculated exactly within the
11153 requested precision. In addition, the integer pointer represented
11154 by ARG_SG will be dereferenced and set to the appropriate signgam
11155 (-1,1) value. */
11157 static tree
11158 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11160 tree result = NULL_TREE;
11162 STRIP_NOPS (arg);
11164 /* To proceed, MPFR must exactly represent the target floating point
11165 format, which only happens when the target base equals two. Also
11166 verify ARG is a constant and that ARG_SG is an int pointer. */
11167 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11168 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11169 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11170 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11172 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11174 /* In addition to NaN and Inf, the argument cannot be zero or a
11175 negative integer. */
11176 if (real_isfinite (ra)
11177 && ra->cl != rvc_zero
11178 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11180 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11181 const int prec = fmt->p;
11182 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11183 int inexact, sg;
11184 tree result_lg;
11186 auto_mpfr m (prec);
11187 mpfr_from_real (m, ra, MPFR_RNDN);
11188 mpfr_clear_flags ();
11189 inexact = mpfr_lgamma (m, &sg, m, rnd);
11190 result_lg = do_mpfr_ckconv (m, type, inexact);
11191 if (result_lg)
11193 tree result_sg;
11195 /* Dereference the arg_sg pointer argument. */
11196 arg_sg = build_fold_indirect_ref (arg_sg);
11197 /* Assign the signgam value into *arg_sg. */
11198 result_sg = fold_build2 (MODIFY_EXPR,
11199 TREE_TYPE (arg_sg), arg_sg,
11200 build_int_cst (TREE_TYPE (arg_sg), sg));
11201 TREE_SIDE_EFFECTS (result_sg) = 1;
11202 /* Combine the signgam assignment with the lgamma result. */
11203 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11204 result_sg, result_lg));
11209 return result;
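/* An illustrative sketch, not part of the original sources: a constant
   argument lets the call fold to the log-gamma value while *ARG_SG receives
   the sign of gamma through the COMPOUND_EXPR built above.

     int sg;
     double v = __builtin_lgamma_r (3.0, &sg);   // v folds to log (2.0),
                                                 // sg gets 1
*/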
11212 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11213 mpc function FUNC on it and return the resulting value as a tree
11214 with type TYPE. The mpfr precision is set to the precision of
11215 TYPE. We assume that function FUNC returns zero if the result
11216 could be calculated exactly within the requested precision. If
11217 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11218 in the arguments and/or results. */
11220 tree
11221 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11222 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11224 tree result = NULL_TREE;
11226 STRIP_NOPS (arg0);
11227 STRIP_NOPS (arg1);
11229 /* To proceed, MPFR must exactly represent the target floating point
11230 format, which only happens when the target base equals two. */
11231 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11232 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
11233 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11234 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
11235 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11237 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11238 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11239 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11240 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11242 if (do_nonfinite
11243 || (real_isfinite (re0) && real_isfinite (im0)
11244 && real_isfinite (re1) && real_isfinite (im1)))
11246 const struct real_format *const fmt =
11247 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11248 const int prec = fmt->p;
11249 const mpfr_rnd_t rnd = fmt->round_towards_zero
11250 ? MPFR_RNDZ : MPFR_RNDN;
11251 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11252 int inexact;
11253 mpc_t m0, m1;
11255 mpc_init2 (m0, prec);
11256 mpc_init2 (m1, prec);
11257 mpfr_from_real (mpc_realref (m0), re0, rnd);
11258 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11259 mpfr_from_real (mpc_realref (m1), re1, rnd);
11260 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11261 mpfr_clear_flags ();
11262 inexact = func (m0, m0, m1, crnd);
11263 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11264 mpc_clear (m0);
11265 mpc_clear (m1);
11269 return result;
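/* An illustrative sketch, not from the original sources: with both complex
   arguments constant, a call such as

     __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i)

   is evaluated with MPC and folds to the complex constant 0.0 + 2.0i.  */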
11272 /* A wrapper function for builtin folding that prevents warnings for
11273 "statement without effect" and the like, caused by removing the
11274 call node earlier than the warning is generated. */
11276 tree
11277 fold_call_stmt (gcall *stmt, bool ignore)
11279 tree ret = NULL_TREE;
11280 tree fndecl = gimple_call_fndecl (stmt);
11281 location_t loc = gimple_location (stmt);
11282 if (fndecl && fndecl_built_in_p (fndecl)
11283 && !gimple_call_va_arg_pack_p (stmt))
11285 int nargs = gimple_call_num_args (stmt);
11286 tree *args = (nargs > 0
11287 ? gimple_call_arg_ptr (stmt, 0)
11288 : &error_mark_node);
11290 if (avoid_folding_inline_builtin (fndecl))
11291 return NULL_TREE;
11292 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11294 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11296 else
11298 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11299 if (ret)
11301 /* Propagate location information from original call to
11302 expansion of builtin. Otherwise things like
11303 maybe_emit_chk_warning, that operate on the expansion
11304 of a builtin, will use the wrong location information. */
11305 if (gimple_has_location (stmt))
11307 tree realret = ret;
11308 if (TREE_CODE (ret) == NOP_EXPR)
11309 realret = TREE_OPERAND (ret, 0);
11310 if (CAN_HAVE_LOCATION_P (realret)
11311 && !EXPR_HAS_LOCATION (realret))
11312 SET_EXPR_LOCATION (realret, loc);
11313 return realret;
11315 return ret;
11319 return NULL_TREE;
11322 /* Look up the function in builtin_decl that corresponds to DECL
11323 and set ASMSPEC as its user assembler name. DECL must be a
11324 function decl that declares a builtin. */
11326 void
11327 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11329 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11330 && asmspec != 0);
11332 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11333 set_user_assembler_name (builtin, asmspec);
11335 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11336 && INT_TYPE_SIZE < BITS_PER_WORD)
11338 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11339 set_user_assembler_libfunc ("ffs", asmspec);
11340 set_optab_libfunc (ffs_optab, mode, "ffs");
11344 /* Return true if DECL is a builtin that expands to a constant or similarly
11345 simple code. */
11346 bool
11347 is_simple_builtin (tree decl)
11349 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11350 switch (DECL_FUNCTION_CODE (decl))
11352 /* Builtins that expand to constants. */
11353 case BUILT_IN_CONSTANT_P:
11354 case BUILT_IN_EXPECT:
11355 case BUILT_IN_OBJECT_SIZE:
11356 case BUILT_IN_UNREACHABLE:
11357 /* Simple register moves or loads from stack. */
11358 case BUILT_IN_ASSUME_ALIGNED:
11359 case BUILT_IN_RETURN_ADDRESS:
11360 case BUILT_IN_EXTRACT_RETURN_ADDR:
11361 case BUILT_IN_FROB_RETURN_ADDR:
11362 case BUILT_IN_RETURN:
11363 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11364 case BUILT_IN_FRAME_ADDRESS:
11365 case BUILT_IN_VA_END:
11366 case BUILT_IN_STACK_SAVE:
11367 case BUILT_IN_STACK_RESTORE:
11368 case BUILT_IN_DWARF_CFA:
11369 /* Exception state returns or moves registers around. */
11370 case BUILT_IN_EH_FILTER:
11371 case BUILT_IN_EH_POINTER:
11372 case BUILT_IN_EH_COPY_VALUES:
11373 return true;
11375 default:
11376 return false;
11379 return false;
11382 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11383 most probably expanded inline into reasonably simple code. This is a
11384 superset of is_simple_builtin. */
11385 bool
11386 is_inexpensive_builtin (tree decl)
11388 if (!decl)
11389 return false;
11390 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11391 return true;
11392 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11393 switch (DECL_FUNCTION_CODE (decl))
11395 case BUILT_IN_ABS:
11396 CASE_BUILT_IN_ALLOCA:
11397 case BUILT_IN_BSWAP16:
11398 case BUILT_IN_BSWAP32:
11399 case BUILT_IN_BSWAP64:
11400 case BUILT_IN_BSWAP128:
11401 case BUILT_IN_CLZ:
11402 case BUILT_IN_CLZIMAX:
11403 case BUILT_IN_CLZL:
11404 case BUILT_IN_CLZLL:
11405 case BUILT_IN_CTZ:
11406 case BUILT_IN_CTZIMAX:
11407 case BUILT_IN_CTZL:
11408 case BUILT_IN_CTZLL:
11409 case BUILT_IN_FFS:
11410 case BUILT_IN_FFSIMAX:
11411 case BUILT_IN_FFSL:
11412 case BUILT_IN_FFSLL:
11413 case BUILT_IN_IMAXABS:
11414 case BUILT_IN_FINITE:
11415 case BUILT_IN_FINITEF:
11416 case BUILT_IN_FINITEL:
11417 case BUILT_IN_FINITED32:
11418 case BUILT_IN_FINITED64:
11419 case BUILT_IN_FINITED128:
11420 case BUILT_IN_FPCLASSIFY:
11421 case BUILT_IN_ISFINITE:
11422 case BUILT_IN_ISINF_SIGN:
11423 case BUILT_IN_ISINF:
11424 case BUILT_IN_ISINFF:
11425 case BUILT_IN_ISINFL:
11426 case BUILT_IN_ISINFD32:
11427 case BUILT_IN_ISINFD64:
11428 case BUILT_IN_ISINFD128:
11429 case BUILT_IN_ISNAN:
11430 case BUILT_IN_ISNANF:
11431 case BUILT_IN_ISNANL:
11432 case BUILT_IN_ISNAND32:
11433 case BUILT_IN_ISNAND64:
11434 case BUILT_IN_ISNAND128:
11435 case BUILT_IN_ISNORMAL:
11436 case BUILT_IN_ISGREATER:
11437 case BUILT_IN_ISGREATEREQUAL:
11438 case BUILT_IN_ISLESS:
11439 case BUILT_IN_ISLESSEQUAL:
11440 case BUILT_IN_ISLESSGREATER:
11441 case BUILT_IN_ISUNORDERED:
11442 case BUILT_IN_ISEQSIG:
11443 case BUILT_IN_VA_ARG_PACK:
11444 case BUILT_IN_VA_ARG_PACK_LEN:
11445 case BUILT_IN_VA_COPY:
11446 case BUILT_IN_TRAP:
11447 case BUILT_IN_UNREACHABLE_TRAP:
11448 case BUILT_IN_SAVEREGS:
11449 case BUILT_IN_POPCOUNTL:
11450 case BUILT_IN_POPCOUNTLL:
11451 case BUILT_IN_POPCOUNTIMAX:
11452 case BUILT_IN_POPCOUNT:
11453 case BUILT_IN_PARITYL:
11454 case BUILT_IN_PARITYLL:
11455 case BUILT_IN_PARITYIMAX:
11456 case BUILT_IN_PARITY:
11457 case BUILT_IN_LABS:
11458 case BUILT_IN_LLABS:
11459 case BUILT_IN_PREFETCH:
11460 case BUILT_IN_ACC_ON_DEVICE:
11461 return true;
11463 default:
11464 return is_simple_builtin (decl);
11467 return false;
11470 /* Return true if T is a constant and the value cast to a target char
11471 can be represented by a host char.
11472 Store the casted char constant in *P if so. */
11474 bool
11475 target_char_cst_p (tree t, char *p)
11477 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11478 return false;
11480 *p = (char)tree_to_uhwi (t);
11481 return true;
11484 /* Return true if the builtin DECL is implemented in a standard library.
11485 Otherwise return false, which doesn't guarantee that it is not (thus the
11486 list of handled builtins below may be incomplete). */
11488 bool
11489 builtin_with_linkage_p (tree decl)
11491 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11492 switch (DECL_FUNCTION_CODE (decl))
11494 CASE_FLT_FN (BUILT_IN_ACOS):
11495 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
11496 CASE_FLT_FN (BUILT_IN_ACOSH):
11497 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
11498 CASE_FLT_FN (BUILT_IN_ASIN):
11499 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
11500 CASE_FLT_FN (BUILT_IN_ASINH):
11501 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
11502 CASE_FLT_FN (BUILT_IN_ATAN):
11503 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
11504 CASE_FLT_FN (BUILT_IN_ATANH):
11505 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
11506 CASE_FLT_FN (BUILT_IN_ATAN2):
11507 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
11508 CASE_FLT_FN (BUILT_IN_CBRT):
11509 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
11510 CASE_FLT_FN (BUILT_IN_CEIL):
11511 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11512 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11513 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11514 CASE_FLT_FN (BUILT_IN_COS):
11515 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
11516 CASE_FLT_FN (BUILT_IN_COSH):
11517 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
11518 CASE_FLT_FN (BUILT_IN_ERF):
11519 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
11520 CASE_FLT_FN (BUILT_IN_ERFC):
11521 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
11522 CASE_FLT_FN (BUILT_IN_EXP):
11523 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
11524 CASE_FLT_FN (BUILT_IN_EXP2):
11525 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
11526 CASE_FLT_FN (BUILT_IN_EXPM1):
11527 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
11528 CASE_FLT_FN (BUILT_IN_FABS):
11529 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11530 CASE_FLT_FN (BUILT_IN_FDIM):
11531 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
11532 CASE_FLT_FN (BUILT_IN_FLOOR):
11533 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11534 CASE_FLT_FN (BUILT_IN_FMA):
11535 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11536 CASE_FLT_FN (BUILT_IN_FMAX):
11537 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11538 CASE_FLT_FN (BUILT_IN_FMIN):
11539 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11540 CASE_FLT_FN (BUILT_IN_FMOD):
11541 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
11542 CASE_FLT_FN (BUILT_IN_FREXP):
11543 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
11544 CASE_FLT_FN (BUILT_IN_HYPOT):
11545 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
11546 CASE_FLT_FN (BUILT_IN_ILOGB):
11547 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
11548 CASE_FLT_FN (BUILT_IN_LDEXP):
11549 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
11550 CASE_FLT_FN (BUILT_IN_LGAMMA):
11551 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
11552 CASE_FLT_FN (BUILT_IN_LLRINT):
11553 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
11554 CASE_FLT_FN (BUILT_IN_LLROUND):
11555 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
11556 CASE_FLT_FN (BUILT_IN_LOG):
11557 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
11558 CASE_FLT_FN (BUILT_IN_LOG10):
11559 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
11560 CASE_FLT_FN (BUILT_IN_LOG1P):
11561 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
11562 CASE_FLT_FN (BUILT_IN_LOG2):
11563 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
11564 CASE_FLT_FN (BUILT_IN_LOGB):
11565 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
11566 CASE_FLT_FN (BUILT_IN_LRINT):
11567 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
11568 CASE_FLT_FN (BUILT_IN_LROUND):
11569 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
11570 CASE_FLT_FN (BUILT_IN_MODF):
11571 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
11572 CASE_FLT_FN (BUILT_IN_NAN):
11573 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
11574 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11575 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11576 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11577 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
11578 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11579 CASE_FLT_FN (BUILT_IN_POW):
11580 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
11581 CASE_FLT_FN (BUILT_IN_REMAINDER):
11582 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
11583 CASE_FLT_FN (BUILT_IN_REMQUO):
11584 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
11585 CASE_FLT_FN (BUILT_IN_RINT):
11586 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11587 CASE_FLT_FN (BUILT_IN_ROUND):
11588 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11589 CASE_FLT_FN (BUILT_IN_SCALBLN):
11590 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
11591 CASE_FLT_FN (BUILT_IN_SCALBN):
11592 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
11593 CASE_FLT_FN (BUILT_IN_SIN):
11594 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
11595 CASE_FLT_FN (BUILT_IN_SINH):
11596 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
11597 CASE_FLT_FN (BUILT_IN_SINCOS):
11598 CASE_FLT_FN (BUILT_IN_SQRT):
11599 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11600 CASE_FLT_FN (BUILT_IN_TAN):
11601 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
11602 CASE_FLT_FN (BUILT_IN_TANH):
11603 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
11604 CASE_FLT_FN (BUILT_IN_TGAMMA):
11605 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
11606 CASE_FLT_FN (BUILT_IN_TRUNC):
11607 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11608 return true;
11610 case BUILT_IN_STPCPY:
11611 case BUILT_IN_STPNCPY:
11612 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11613 by libiberty's stpcpy.c for MinGW targets so we need to return true
11614 in order to be able to build libiberty in LTO mode for them. */
11615 return true;
11617 default:
11618 break;
11620 return false;
11623 /* Return true if OFFRNG is bounded to a subrange of offset values
11624 valid for the largest possible object. */
11626 bool
11627 access_ref::offset_bounded () const
11629 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11630 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11631 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11634 /* Return the fnspec string describing the known side effects of builtin
11635 CALLEE, or "" if nothing is known. See tree-ssa-structalias.cc:
11636 find_func_aliases for the list of builtins we might need to handle here. */
11638 attr_fnspec
11639 builtin_fnspec (tree callee)
11641 built_in_function code = DECL_FUNCTION_CODE (callee);
11643 switch (code)
11645 /* All the following functions read memory pointed to by
11646 their second argument and write memory pointed to by first
11647 argument.
11648 strcat/strncat additionally reads memory pointed to by the first
11649 argument. */
11650 case BUILT_IN_STRCAT:
11651 case BUILT_IN_STRCAT_CHK:
11652 return "1cW 1 ";
11653 case BUILT_IN_STRNCAT:
11654 case BUILT_IN_STRNCAT_CHK:
11655 return "1cW 13";
11656 case BUILT_IN_STRCPY:
11657 case BUILT_IN_STRCPY_CHK:
11658 return "1cO 1 ";
11659 case BUILT_IN_STPCPY:
11660 case BUILT_IN_STPCPY_CHK:
11661 return ".cO 1 ";
11662 case BUILT_IN_STRNCPY:
11663 case BUILT_IN_MEMCPY:
11664 case BUILT_IN_MEMMOVE:
11665 case BUILT_IN_TM_MEMCPY:
11666 case BUILT_IN_TM_MEMMOVE:
11667 case BUILT_IN_STRNCPY_CHK:
11668 case BUILT_IN_MEMCPY_CHK:
11669 case BUILT_IN_MEMMOVE_CHK:
11670 return "1cO313";
11671 case BUILT_IN_MEMPCPY:
11672 case BUILT_IN_MEMPCPY_CHK:
11673 return ".cO313";
11674 case BUILT_IN_STPNCPY:
11675 case BUILT_IN_STPNCPY_CHK:
11676 return ".cO313";
11677 case BUILT_IN_BCOPY:
11678 return ".c23O3";
11679 case BUILT_IN_BZERO:
11680 return ".cO2";
11681 case BUILT_IN_MEMCMP:
11682 case BUILT_IN_MEMCMP_EQ:
11683 case BUILT_IN_BCMP:
11684 case BUILT_IN_STRNCMP:
11685 case BUILT_IN_STRNCMP_EQ:
11686 case BUILT_IN_STRNCASECMP:
11687 return ".cR3R3";
11689 /* The following functions read memory pointed to by their
11690 first argument. */
11691 CASE_BUILT_IN_TM_LOAD (1):
11692 CASE_BUILT_IN_TM_LOAD (2):
11693 CASE_BUILT_IN_TM_LOAD (4):
11694 CASE_BUILT_IN_TM_LOAD (8):
11695 CASE_BUILT_IN_TM_LOAD (FLOAT):
11696 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11697 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11698 CASE_BUILT_IN_TM_LOAD (M64):
11699 CASE_BUILT_IN_TM_LOAD (M128):
11700 CASE_BUILT_IN_TM_LOAD (M256):
11701 case BUILT_IN_TM_LOG:
11702 case BUILT_IN_TM_LOG_1:
11703 case BUILT_IN_TM_LOG_2:
11704 case BUILT_IN_TM_LOG_4:
11705 case BUILT_IN_TM_LOG_8:
11706 case BUILT_IN_TM_LOG_FLOAT:
11707 case BUILT_IN_TM_LOG_DOUBLE:
11708 case BUILT_IN_TM_LOG_LDOUBLE:
11709 case BUILT_IN_TM_LOG_M64:
11710 case BUILT_IN_TM_LOG_M128:
11711 case BUILT_IN_TM_LOG_M256:
11712 return ".cR ";
11714 case BUILT_IN_INDEX:
11715 case BUILT_IN_RINDEX:
11716 case BUILT_IN_STRCHR:
11717 case BUILT_IN_STRLEN:
11718 case BUILT_IN_STRRCHR:
11719 return ".cR ";
11720 case BUILT_IN_STRNLEN:
11721 return ".cR2";
11723 /* These read memory pointed to by the first argument.
11724 Allocating memory does not have any side-effects apart from
11725 being the definition point for the pointer.
11726 Unix98 specifies that errno is set on allocation failure. */
11727 case BUILT_IN_STRDUP:
11728 return "mCR ";
11729 case BUILT_IN_STRNDUP:
11730 return "mCR2";
11731 /* Allocating memory does not have any side-effects apart from
11732 being the definition point for the pointer. */
11733 case BUILT_IN_MALLOC:
11734 case BUILT_IN_ALIGNED_ALLOC:
11735 case BUILT_IN_CALLOC:
11736 case BUILT_IN_GOMP_ALLOC:
11737 return "mC";
11738 CASE_BUILT_IN_ALLOCA:
11739 return "mc";
11740 /* These read memory pointed to by the first argument with size
11741 in the third argument. */
11742 case BUILT_IN_MEMCHR:
11743 return ".cR3";
11744 /* These read memory pointed to by the first and second arguments. */
11745 case BUILT_IN_STRSTR:
11746 case BUILT_IN_STRPBRK:
11747 case BUILT_IN_STRCASECMP:
11748 case BUILT_IN_STRCSPN:
11749 case BUILT_IN_STRSPN:
11750 case BUILT_IN_STRCMP:
11751 case BUILT_IN_STRCMP_EQ:
11752 return ".cR R ";
11753 /* Freeing memory kills the pointed-to memory. More importantly
11754 the call has to serve as a barrier for moving loads and stores
11755 across it. */
11756 case BUILT_IN_STACK_RESTORE:
11757 case BUILT_IN_FREE:
11758 case BUILT_IN_GOMP_FREE:
11759 return ".co ";
11760 case BUILT_IN_VA_END:
11761 return ".cO ";
11762 /* Realloc serves both as allocation point and deallocation point. */
11763 case BUILT_IN_REALLOC:
11764 return ".Cw ";
11765 case BUILT_IN_GAMMA_R:
11766 case BUILT_IN_GAMMAF_R:
11767 case BUILT_IN_GAMMAL_R:
11768 case BUILT_IN_LGAMMA_R:
11769 case BUILT_IN_LGAMMAF_R:
11770 case BUILT_IN_LGAMMAL_R:
11771 return ".C. Ot";
11772 case BUILT_IN_FREXP:
11773 case BUILT_IN_FREXPF:
11774 case BUILT_IN_FREXPL:
11775 case BUILT_IN_MODF:
11776 case BUILT_IN_MODFF:
11777 case BUILT_IN_MODFL:
11778 return ".c. Ot";
11779 case BUILT_IN_REMQUO:
11780 case BUILT_IN_REMQUOF:
11781 case BUILT_IN_REMQUOL:
11782 return ".c. . Ot";
11783 case BUILT_IN_SINCOS:
11784 case BUILT_IN_SINCOSF:
11785 case BUILT_IN_SINCOSL:
11786 return ".c. OtOt";
11787 case BUILT_IN_MEMSET:
11788 case BUILT_IN_MEMSET_CHK:
11789 case BUILT_IN_TM_MEMSET:
11790 return "1cO3";
11791 CASE_BUILT_IN_TM_STORE (1):
11792 CASE_BUILT_IN_TM_STORE (2):
11793 CASE_BUILT_IN_TM_STORE (4):
11794 CASE_BUILT_IN_TM_STORE (8):
11795 CASE_BUILT_IN_TM_STORE (FLOAT):
11796 CASE_BUILT_IN_TM_STORE (DOUBLE):
11797 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11798 CASE_BUILT_IN_TM_STORE (M64):
11799 CASE_BUILT_IN_TM_STORE (M128):
11800 CASE_BUILT_IN_TM_STORE (M256):
11801 return ".cO ";
11802 case BUILT_IN_STACK_SAVE:
11803 case BUILT_IN_RETURN:
11804 case BUILT_IN_EH_POINTER:
11805 case BUILT_IN_EH_FILTER:
11806 case BUILT_IN_UNWIND_RESUME:
11807 case BUILT_IN_CXA_END_CLEANUP:
11808 case BUILT_IN_EH_COPY_VALUES:
11809 case BUILT_IN_FRAME_ADDRESS:
11810 case BUILT_IN_APPLY_ARGS:
11811 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11812 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11813 case BUILT_IN_PREFETCH:
11814 case BUILT_IN_DWARF_CFA:
11815 case BUILT_IN_RETURN_ADDRESS:
11816 return ".c";
11817 case BUILT_IN_ASSUME_ALIGNED:
11818 case BUILT_IN_EXPECT:
11819 case BUILT_IN_EXPECT_WITH_PROBABILITY:
11820 return "1cX ";
11821 /* But posix_memalign stores a pointer into the memory pointed to
11822 by its first argument. */
11823 case BUILT_IN_POSIX_MEMALIGN:
11824 return ".cOt";
11826 default:
11827 return "";
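/* A decoding sketch, not part of the original sources; the authoritative
   description of the fnspec encoding is the comment in attr-fnspec.h.
   Roughly, for "1cO313" as returned above for memcpy: '1' says the function
   returns its first argument, 'c' says it is const apart from the described
   argument effects, "O3" says the first argument's memory is only written
   with the size given by argument 3, and "13" says the second argument's
   memory is read and copied to argument 1, again with size argument 3.  */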