gcc/builtins.cc
1 /* Expand builtin functions.
2 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "ipa-strub.h" /* strub_watermark_parm() */
75 #include "gomp-constants.h"
76 #include "omp-general.h"
77 #include "tree-dfa.h"
78 #include "gimple-ssa.h"
79 #include "tree-ssa-live.h"
80 #include "tree-outof-ssa.h"
81 #include "attr-fnspec.h"
82 #include "demangle.h"
83 #include "gimple-range.h"
84 #include "pointer-query.h"
86 struct target_builtins default_target_builtins;
87 #if SWITCHABLE_TARGET
88 struct target_builtins *this_target_builtins = &default_target_builtins;
89 #endif
91 /* Define the names of the builtin function types and codes. */
92 const char *const built_in_class_names[BUILT_IN_LAST]
93 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
95 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
96 const char * built_in_names[(int) END_BUILTINS] =
98 #include "builtins.def"
101 /* Setup an array of builtin_info_type, make sure each element decl is
102 initialized to NULL_TREE. */
103 builtin_info_type builtin_info[(int)END_BUILTINS];
105 /* Non-zero if __builtin_constant_p should be folded right away. */
106 bool force_folding_builtin_constant_p;
108 static int target_char_cast (tree, char *);
109 static int apply_args_size (void);
110 static int apply_result_size (void);
111 static rtx result_vector (int, rtx);
112 static void expand_builtin_prefetch (tree);
113 static rtx expand_builtin_apply_args (void);
114 static rtx expand_builtin_apply_args_1 (void);
115 static rtx expand_builtin_apply (rtx, rtx, rtx);
116 static void expand_builtin_return (rtx);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_issignaling (tree, rtx);
127 static rtx expand_builtin_int_roundingfn (tree, rtx);
128 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 static rtx expand_builtin_next_arg (void);
130 static rtx expand_builtin_va_start (tree);
131 static rtx expand_builtin_va_end (tree);
132 static rtx expand_builtin_va_copy (tree);
133 static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 static rtx expand_builtin_strcmp (tree, rtx);
135 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 static rtx expand_builtin_memcpy (tree, rtx);
137 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 rtx target, tree exp,
139 memop_ret retmode,
140 bool might_overlap);
141 static rtx expand_builtin_memmove (tree, rtx);
142 static rtx expand_builtin_mempcpy (tree, rtx);
143 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 static rtx expand_builtin_strcpy (tree, rtx);
145 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 static rtx expand_builtin_bzero (tree);
150 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 static rtx expand_builtin_alloca (tree);
153 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 static rtx expand_builtin_frame_address (tree, tree);
155 static rtx expand_builtin_stack_address ();
156 static tree stabilize_va_list_loc (location_t, tree, int);
157 static rtx expand_builtin_expect (tree, rtx);
158 static rtx expand_builtin_expect_with_probability (tree, rtx);
159 static tree fold_builtin_constant_p (tree);
160 static tree fold_builtin_classify_type (tree);
161 static tree fold_builtin_strlen (location_t, tree, tree, tree);
162 static tree fold_builtin_inf (location_t, tree, int);
163 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164 static bool validate_arg (const_tree, enum tree_code code);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_isascii (location_t, tree);
169 static tree fold_builtin_toascii (location_t, tree);
170 static tree fold_builtin_isdigit (location_t, tree);
171 static tree fold_builtin_fabs (location_t, tree, tree);
172 static tree fold_builtin_abs (location_t, tree, tree);
173 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 enum tree_code);
175 static tree fold_builtin_iseqsig (location_t, tree, tree);
176 static tree fold_builtin_varargs (location_t, tree, tree*, int);
178 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179 static tree fold_builtin_strspn (location_t, tree, tree, tree);
180 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
182 static rtx expand_builtin_object_size (tree);
183 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 enum built_in_function);
185 static void maybe_emit_chk_warning (tree, enum built_in_function);
186 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
189 unsigned HOST_WIDE_INT target_newline;
190 unsigned HOST_WIDE_INT target_percent;
191 static unsigned HOST_WIDE_INT target_c;
192 static unsigned HOST_WIDE_INT target_s;
193 char target_percent_c[3];
194 char target_percent_s[3];
195 char target_percent_s_newline[4];
196 static tree do_mpfr_remquo (tree, tree, tree);
197 static tree do_mpfr_lgamma_r (tree, tree, tree);
198 static void expand_builtin_sync_synchronize (void);
200 /* Return true if NAME starts with __builtin_ or __sync_. */
202 static bool
203 is_builtin_name (const char *name)
205 return (startswith (name, "__builtin_")
206 || startswith (name, "__sync_")
207 || startswith (name, "__atomic_"));
210 /* Return true if NODE should be considered for inline expansion regardless
211 of the optimization level. This means whenever a function is invoked with
212 its "internal" name, which normally contains the prefix "__builtin". */
214 bool
215 called_as_built_in (tree node)
217 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
218 we want the name used to call the function, not the name it
219 will have. */
220 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
221 return is_builtin_name (name);
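/* For example, a call spelled __builtin_memcpy (d, s, n) is "called as
   built in" and remains a candidate for inline expansion even at -O0,
   whereas the same call spelled memcpy (d, s, n) is not; the __sync_
   and __atomic_ prefixes are treated the same way.  */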
224 /* Compute values M and N such that M divides (address of EXP - N) and such
225 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
226 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
227 *alignp and any bit-offset to *bitposp.
229 Note that the address (and thus the alignment) computed here is based
230 on the address to which a symbol resolves, whereas DECL_ALIGN is based
231 on the address at which an object is actually located. These two
232 addresses are not always the same. For example, on ARM targets,
233 the address &foo of a Thumb function foo() has the lowest bit set,
234 whereas foo() itself starts on an even address.
236 If ADDR_P is true we are taking the address of the memory reference EXP
237 and thus cannot rely on the access taking place. */
239 bool
240 get_object_alignment_2 (tree exp, unsigned int *alignp,
241 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
243 poly_int64 bitsize, bitpos;
244 tree offset;
245 machine_mode mode;
246 int unsignedp, reversep, volatilep;
247 unsigned int align = BITS_PER_UNIT;
248 bool known_alignment = false;
250 /* Get the innermost object and the constant (bitpos) and possibly
251 variable (offset) offset of the access. */
252 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
253 &unsignedp, &reversep, &volatilep);
255 /* Extract alignment information from the innermost object and
256 possibly adjust bitpos and offset. */
257 if (TREE_CODE (exp) == FUNCTION_DECL)
259 /* Function addresses can encode extra information besides their
260 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
261 allows the low bit to be used as a virtual bit, we know
262 that the address itself must be at least 2-byte aligned. */
263 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
264 align = 2 * BITS_PER_UNIT;
266 else if (TREE_CODE (exp) == LABEL_DECL)
268 else if (TREE_CODE (exp) == CONST_DECL)
270 /* The alignment of a CONST_DECL is determined by its initializer. */
271 exp = DECL_INITIAL (exp);
272 align = TYPE_ALIGN (TREE_TYPE (exp));
273 if (CONSTANT_CLASS_P (exp))
274 align = targetm.constant_alignment (exp, align);
276 known_alignment = true;
278 else if (DECL_P (exp))
280 align = DECL_ALIGN (exp);
281 known_alignment = true;
283 else if (TREE_CODE (exp) == INDIRECT_REF
284 || TREE_CODE (exp) == MEM_REF
285 || TREE_CODE (exp) == TARGET_MEM_REF)
287 tree addr = TREE_OPERAND (exp, 0);
288 unsigned ptr_align;
289 unsigned HOST_WIDE_INT ptr_bitpos;
290 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
292 /* If the address is explicitly aligned, handle that. */
293 if (TREE_CODE (addr) == BIT_AND_EXPR
294 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
296 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
297 ptr_bitmask *= BITS_PER_UNIT;
298 align = least_bit_hwi (ptr_bitmask);
299 addr = TREE_OPERAND (addr, 0);
302 known_alignment
303 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
304 align = MAX (ptr_align, align);
306 /* Re-apply explicit alignment to the bitpos. */
307 ptr_bitpos &= ptr_bitmask;
309 /* The alignment of the pointer operand in a TARGET_MEM_REF
310 has to take the variable offset parts into account. */
311 if (TREE_CODE (exp) == TARGET_MEM_REF)
313 if (TMR_INDEX (exp))
315 unsigned HOST_WIDE_INT step = 1;
316 if (TMR_STEP (exp))
317 step = TREE_INT_CST_LOW (TMR_STEP (exp));
318 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
320 if (TMR_INDEX2 (exp))
321 align = BITS_PER_UNIT;
322 known_alignment = false;
325 /* When EXP is an actual memory reference then we can use
326 TYPE_ALIGN of a pointer indirection to derive alignment.
327 Do so only if get_pointer_alignment_1 did not reveal absolute
328 alignment knowledge and if using that alignment would
329 improve the situation. */
330 unsigned int talign;
331 if (!addr_p && !known_alignment
332 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
333 && talign > align)
334 align = talign;
335 else
337 /* Else adjust bitpos accordingly. */
338 bitpos += ptr_bitpos;
339 if (TREE_CODE (exp) == MEM_REF
340 || TREE_CODE (exp) == TARGET_MEM_REF)
341 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
344 else if (TREE_CODE (exp) == STRING_CST)
346 /* A STRING_CST is the only kind of constant object we allow not to be
347 wrapped inside a CONST_DECL. */
348 align = TYPE_ALIGN (TREE_TYPE (exp));
349 if (CONSTANT_CLASS_P (exp))
350 align = targetm.constant_alignment (exp, align);
352 known_alignment = true;
355 /* If there is a non-constant offset part extract the maximum
356 alignment that can prevail. */
357 if (offset)
359 unsigned int trailing_zeros = tree_ctz (offset);
360 if (trailing_zeros < HOST_BITS_PER_INT)
362 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
363 if (inner)
364 align = MIN (align, inner);
368 /* Account for the alignment of runtime coefficients, so that the constant
369 bitpos is guaranteed to be accurate. */
370 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
371 if (alt_align != 0 && alt_align < align)
373 align = alt_align;
374 known_alignment = false;
377 *alignp = align;
378 *bitposp = bitpos.coeffs[0] & (align - 1);
379 return known_alignment;
382 /* For a memory reference expression EXP compute values M and N such that M
383 divides (&EXP - N) and such that N < M. If these numbers can be determined,
384 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
385 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
387 bool
388 get_object_alignment_1 (tree exp, unsigned int *alignp,
389 unsigned HOST_WIDE_INT *bitposp)
391 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
392 with it. */
393 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
394 exp = TREE_OPERAND (exp, 0);
395 return get_object_alignment_2 (exp, alignp, bitposp, false);
398 /* Return the alignment in bits of EXP, an object. */
400 unsigned int
401 get_object_alignment (tree exp)
403 unsigned HOST_WIDE_INT bitpos = 0;
404 unsigned int align;
406 get_object_alignment_1 (exp, &align, &bitpos);
408 /* align and bitpos now specify known low bits of the pointer.
409 ptr & (align - 1) == bitpos. */
411 if (bitpos != 0)
412 align = least_bit_hwi (bitpos);
413 return align;
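/* Worked example of the contract stated above: if get_object_alignment_1
   reports align == 64 and bitpos == 16 (both in bits), the address is
   known to satisfy (addr & 63) == 16, so the largest power of two that
   certainly divides it is 16, and get_object_alignment returns
   least_bit_hwi (16) == 16 bits, i.e. 2-byte alignment.  */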
416 /* For a pointer valued expression EXP compute values M and N such that M
417 divides (EXP - N) and such that N < M. If these numbers can be determined,
418 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
419 the results are just a conservative approximation.
421 If EXP is not a pointer, false is returned too. */
423 bool
424 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
425 unsigned HOST_WIDE_INT *bitposp)
427 STRIP_NOPS (exp);
429 if (TREE_CODE (exp) == ADDR_EXPR)
430 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
431 alignp, bitposp, true);
432 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
434 unsigned int align;
435 unsigned HOST_WIDE_INT bitpos;
436 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
437 &align, &bitpos);
438 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
439 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
440 else
442 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
443 if (trailing_zeros < HOST_BITS_PER_INT)
445 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
446 if (inner)
447 align = MIN (align, inner);
450 *alignp = align;
451 *bitposp = bitpos & (align - 1);
452 return res;
454 else if (TREE_CODE (exp) == SSA_NAME
455 && POINTER_TYPE_P (TREE_TYPE (exp)))
457 unsigned int ptr_align, ptr_misalign;
458 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
460 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
462 *bitposp = ptr_misalign * BITS_PER_UNIT;
463 *alignp = ptr_align * BITS_PER_UNIT;
464 /* Make sure to return a sensible alignment when the multiplication
465 by BITS_PER_UNIT overflowed. */
466 if (*alignp == 0)
467 *alignp = 1u << (HOST_BITS_PER_INT - 1);
468 /* We cannot really tell whether this result is an approximation. */
469 return false;
471 else
473 *bitposp = 0;
474 *alignp = BITS_PER_UNIT;
475 return false;
478 else if (TREE_CODE (exp) == INTEGER_CST)
480 *alignp = BIGGEST_ALIGNMENT;
481 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
482 & (BIGGEST_ALIGNMENT - 1));
483 return true;
486 *bitposp = 0;
487 *alignp = BITS_PER_UNIT;
488 return false;
491 /* Return the alignment in bits of EXP, a pointer valued expression.
492 The alignment returned is, by default, the alignment of the thing that
493 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
495 Otherwise, look at the expression to see if we can do better, i.e., if the
496 expression is actually pointing at an object whose alignment is tighter. */
498 unsigned int
499 get_pointer_alignment (tree exp)
501 unsigned HOST_WIDE_INT bitpos = 0;
502 unsigned int align;
504 get_pointer_alignment_1 (exp, &align, &bitpos);
506 /* align and bitpos now specify known low bits of the pointer.
507 ptr & (align - 1) == bitpos. */
509 if (bitpos != 0)
510 align = least_bit_hwi (bitpos);
512 return align;
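/* For instance, when SSA_NAME_PTR_INFO records an alignment of 16 bytes
   with a misalignment of 4 bytes, get_pointer_alignment_1 sets *ALIGNP
   to 128 and *BITPOSP to 32 (bits), and get_pointer_alignment collapses
   that to least_bit_hwi (32) == 32 bits, i.e. a 4-byte-aligned pointer.  */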
515 /* Return the number of leading non-zero elements in the sequence
516 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
517 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
519 unsigned
520 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
522 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
524 unsigned n;
526 if (eltsize == 1)
528 /* Optimize the common case of plain char. */
529 for (n = 0; n < maxelts; n++)
531 const char *elt = (const char*) ptr + n;
532 if (!*elt)
533 break;
536 else
538 for (n = 0; n < maxelts; n++)
540 const char *elt = (const char*) ptr + n * eltsize;
541 if (!memcmp (elt, "\0\0\0\0", eltsize))
542 break;
545 return n;
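/* A worked example for the common single-byte case:

     string_length ("foo\0bar", 1, 8) == 3   (stops at the embedded NUL)
     string_length ("foobar", 1, 4)   == 4   (MAXELTS is reached first)

   For ELTSIZE 2 or 4 the memcmp path performs the same scan one whole
   element at a time.  */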
548 /* Compute the length of a null-terminated character string or wide
549 character string handling character sizes of 1, 2, and 4 bytes.
550 TREE_STRING_LENGTH is not the right way because it evaluates to
551 the size of the character array in bytes (as opposed to characters)
552 and because it can contain a zero byte in the middle.
554 ONLY_VALUE should be nonzero if the result is not going to be emitted
555 into the instruction stream and zero if it is going to be expanded.
556 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
557 is returned, otherwise NULL, since
558 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
559 evaluate the side-effects.
561 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
562 accesses. Note that this implies the result is not going to be emitted
563 into the instruction stream.
565 Additional information about the string accessed may be recorded
566 in DATA. For example, if ARG references an unterminated string,
567 then the declaration will be stored in the DECL field. If the
568 length of the unterminated string can be determined, it'll be
569 stored in the LEN field. Note this length could well be different
570 than what a C strlen call would return.
572 ELTSIZE is 1 for normal single byte character strings, and 2 or
573 4 for wide character strings. ELTSIZE is by default 1.
575 The value returned is of type `ssizetype'. */
577 tree
578 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
580 /* If we were not passed a DATA pointer, then get one to a local
581 structure. That avoids having to check DATA for NULL before
582 each time we want to use it. */
583 c_strlen_data local_strlen_data = { };
584 if (!data)
585 data = &local_strlen_data;
587 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
589 tree src = STRIP_NOPS (arg);
590 if (TREE_CODE (src) == COND_EXPR
591 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
593 tree len1, len2;
595 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
596 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
597 if (tree_int_cst_equal (len1, len2))
598 return len1;
601 if (TREE_CODE (src) == COMPOUND_EXPR
602 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
603 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
605 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
607 /* Offset from the beginning of the string in bytes. */
608 tree byteoff;
609 tree memsize;
610 tree decl;
611 src = string_constant (src, &byteoff, &memsize, &decl);
612 if (src == 0)
613 return NULL_TREE;
615 /* Determine the size of the string element. */
616 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
617 return NULL_TREE;
619 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
620 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
621 in case the latter is less than the size of the array, such as when
622 SRC refers to a short string literal used to initialize a large array.
623 In that case, the elements of the array after the terminating NUL are
624 all NUL. */
625 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
626 strelts = strelts / eltsize;
628 if (!tree_fits_uhwi_p (memsize))
629 return NULL_TREE;
631 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
633 /* PTR can point to the byte representation of any string type, including
634 char* and wchar_t*. */
635 const char *ptr = TREE_STRING_POINTER (src);
637 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
639 /* The code below works only for single byte character types. */
640 if (eltsize != 1)
641 return NULL_TREE;
643 /* If the string has an internal NUL character followed by any
644 non-NUL characters (e.g., "foo\0bar"), we can't compute
645 the offset to the following NUL if we don't know where to
646 start searching for it. */
647 unsigned len = string_length (ptr, eltsize, strelts);
649 /* Return when an embedded null character is found or none at all.
650 In the latter case, set the DECL/LEN field in the DATA structure
651 so that callers may examine them. */
652 if (len + 1 < strelts)
653 return NULL_TREE;
654 else if (len >= maxelts)
656 data->decl = decl;
657 data->off = byteoff;
658 data->minlen = ssize_int (len);
659 return NULL_TREE;
662 /* For empty strings the result should be zero. */
663 if (len == 0)
664 return ssize_int (0);
666 /* We don't know the starting offset, but we do know that the string
667 has no internal zero bytes. If the offset falls within the bounds
668 of the string subtract the offset from the length of the string,
669 and return that. Otherwise the length is zero. Take care to
670 use SAVE_EXPR in case the OFFSET has side-effects. */
671 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
672 : byteoff;
673 offsave = fold_convert_loc (loc, sizetype, offsave);
674 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
675 size_int (len));
676 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
677 offsave);
678 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
679 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
680 build_zero_cst (ssizetype));
683 /* Offset from the beginning of the string in elements. */
684 HOST_WIDE_INT eltoff;
686 /* We have a known offset into the string. Start searching there for
687 a null character if we can represent it as a single HOST_WIDE_INT. */
688 if (byteoff == 0)
689 eltoff = 0;
690 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
691 eltoff = -1;
692 else
693 eltoff = tree_to_uhwi (byteoff) / eltsize;
695 /* If the offset is known to be out of bounds, warn, and call strlen at
696 runtime. */
697 if (eltoff < 0 || eltoff >= maxelts)
699 /* Suppress multiple warnings for propagated constant strings. */
700 if (only_value != 2
701 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
702 && warning_at (loc, OPT_Warray_bounds_,
703 "offset %qwi outside bounds of constant string",
704 eltoff))
706 if (decl)
707 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
708 suppress_warning (arg, OPT_Warray_bounds_);
710 return NULL_TREE;
713 /* If eltoff is larger than strelts but less than maxelts the
714 string length is zero, since the excess memory will be zero. */
715 if (eltoff > strelts)
716 return ssize_int (0);
718 /* Use strlen to search for the first zero byte. Since any strings
719 constructed with build_string will have nulls appended, we win even
720 if we get handed something like (char[4])"abcd".
722 Since ELTOFF is our starting index into the string, no further
723 calculation is needed. */
724 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
725 strelts - eltoff);
727 /* Don't know what to return if there was no zero termination.
728 Ideally this would turn into a gcc_checking_assert over time.
729 Set DECL/LEN so callers can examine them. */
730 if (len >= maxelts - eltoff)
732 data->decl = decl;
733 data->off = byteoff;
734 data->minlen = ssize_int (len);
735 return NULL_TREE;
738 return ssize_int (len);
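/* Some concrete cases: for the STRING_CST "hello" the result is
   ssize_int (5); for &"hello"[2] it is ssize_int (3); and for
   "foo\0bar" accessed at an offset that is not a compile-time constant
   the result is NULL_TREE, because the embedded NUL makes the length
   depend on where the scan starts.  */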
741 /* Return a constant integer corresponding to target reading
742 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
743 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
744 are assumed to be zero, otherwise it reads as many characters
745 as needed. */
747 rtx
748 c_readstr (const char *str, fixed_size_mode mode,
749 bool null_terminated_p/*=true*/)
751 auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
753 bytes.reserve (GET_MODE_SIZE (mode));
755 target_unit ch = 1;
756 for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
758 if (ch || !null_terminated_p)
759 ch = (unsigned char) str[i];
760 bytes.quick_push (ch);
763 return native_decode_rtx (mode, bytes, 0);
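/* For example, on a little-endian target c_readstr ("ab", SImode)
   yields the constant 0x00006261: the bytes 'a' and 'b' are copied and
   everything after the terminating NUL reads as zero.  Passing
   NULL_TERMINATED_P as false would instead keep reading bytes from STR
   past the first NUL.  */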
766 /* Cast a target constant CST to target CHAR and if that value fits into
767 host char type, return zero and put that value into variable pointed to by
768 P. */
770 static int
771 target_char_cast (tree cst, char *p)
773 unsigned HOST_WIDE_INT val, hostval;
775 if (TREE_CODE (cst) != INTEGER_CST
776 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
777 return 1;
779 /* Do not care if it fits or not right here. */
780 val = TREE_INT_CST_LOW (cst);
782 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
783 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
785 hostval = val;
786 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
787 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
789 if (val != hostval)
790 return 1;
792 *p = hostval;
793 return 0;
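/* For example, target_char_cast on the INTEGER_CST 65 stores 'A' in *P
   and returns 0.  The value is masked first to the target character
   width and then to the host character width; 1 is returned only when
   those two results differ, e.g. the value 0x141 with a 16-bit target
   char but an 8-bit host char.  */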
796 /* Similar to save_expr, but assumes that arbitrary code is not executed
797 in between the multiple evaluations. In particular, we assume that a
798 non-addressable local variable will not be modified. */
800 static tree
801 builtin_save_expr (tree exp)
803 if (TREE_CODE (exp) == SSA_NAME
804 || (TREE_ADDRESSABLE (exp) == 0
805 && (TREE_CODE (exp) == PARM_DECL
806 || (VAR_P (exp) && !TREE_STATIC (exp)))))
807 return exp;
809 return save_expr (exp);
812 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
813 times to get the address of either a higher stack frame, or a return
814 address located within it (depending on FNDECL_CODE). */
816 static rtx
817 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
819 int i;
820 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
821 if (tem == NULL_RTX)
823 /* For a zero count with __builtin_return_address, we don't care what
824 frame address we return, because target-specific definitions will
825 override us. Therefore frame pointer elimination is OK, and using
826 the soft frame pointer is OK.
828 For a nonzero count, or a zero count with __builtin_frame_address,
829 we require a stable offset from the current frame pointer to the
830 previous one, so we must use the hard frame pointer, and
831 we must disable frame pointer elimination. */
832 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
833 tem = frame_pointer_rtx;
834 else
836 tem = hard_frame_pointer_rtx;
838 /* Tell reload not to eliminate the frame pointer. */
839 crtl->accesses_prior_frames = 1;
843 if (count > 0)
844 SETUP_FRAME_ADDRESSES ();
846 /* On the SPARC, the return address is not in the frame, it is in a
847 register. There is no way to access it off of the current frame
848 pointer, but it can be accessed off the previous frame pointer by
849 reading the value from the register window save area. */
850 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
851 count--;
853 /* Scan back COUNT frames to the specified frame. */
854 for (i = 0; i < count; i++)
856 /* Assume the dynamic chain pointer is in the word that the
857 frame address points to, unless otherwise specified. */
858 tem = DYNAMIC_CHAIN_ADDRESS (tem);
859 tem = memory_address (Pmode, tem);
860 tem = gen_frame_mem (Pmode, tem);
861 tem = copy_to_reg (tem);
864 /* For __builtin_frame_address, return what we've got. But, on
865 the SPARC for example, we may have to add a bias. */
866 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
867 return FRAME_ADDR_RTX (tem);
869 /* For __builtin_return_address, get the return address from that frame. */
870 #ifdef RETURN_ADDR_RTX
871 tem = RETURN_ADDR_RTX (count, tem);
872 #else
873 tem = memory_address (Pmode,
874 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
875 tem = gen_frame_mem (Pmode, tem);
876 #endif
877 return tem;
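/* At the source level this expands, for example:

     void *ret_addr     = __builtin_return_address (0);
     void *caller_frame = __builtin_frame_address (1);

   A COUNT of zero refers to the current frame; each increment follows
   one more link of the dynamic chain, which is why nonzero counts force
   use of the hard frame pointer above.  */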
880 /* Alias set used for setjmp buffer. */
881 static alias_set_type setjmp_alias_set = -1;
883 /* Construct the leading half of a __builtin_setjmp call. Control will
884 return to RECEIVER_LABEL. This is also called directly by the SJLJ
885 exception handling code. */
887 void
888 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
890 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
891 rtx stack_save;
892 rtx mem;
894 if (setjmp_alias_set == -1)
895 setjmp_alias_set = new_alias_set ();
897 buf_addr = convert_memory_address (Pmode, buf_addr);
899 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
901 /* We store the frame pointer and the address of receiver_label in
902 the buffer and use the rest of it for the stack save area, which
903 is machine-dependent. */
905 mem = gen_rtx_MEM (Pmode, buf_addr);
906 set_mem_alias_set (mem, setjmp_alias_set);
907 emit_move_insn (mem, hard_frame_pointer_rtx);
909 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
910 GET_MODE_SIZE (Pmode))),
911 set_mem_alias_set (mem, setjmp_alias_set);
913 emit_move_insn (validize_mem (mem),
914 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
916 stack_save = gen_rtx_MEM (sa_mode,
917 plus_constant (Pmode, buf_addr,
918 2 * GET_MODE_SIZE (Pmode)));
919 set_mem_alias_set (stack_save, setjmp_alias_set);
920 emit_stack_save (SAVE_NONLOCAL, &stack_save);
922 /* If there is further processing to do, do it. */
923 if (targetm.have_builtin_setjmp_setup ())
924 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
926 /* We have a nonlocal label. */
927 cfun->has_nonlocal_label = 1;
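/* To summarize the layout established above, the buffer passed to
   __builtin_setjmp is used as:

     word 0           hard frame pointer
     word 1           address of RECEIVER_LABEL
     words 2 onward   stack save area (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below
   rely on this same layout.  */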
930 /* Construct the trailing part of a __builtin_setjmp call. This is
931 also called directly by the SJLJ exception handling code.
932 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
934 void
935 expand_builtin_setjmp_receiver (rtx receiver_label)
937 rtx chain;
939 /* Mark the FP as used when we get here, so we have to make sure it's
940 marked as used by this function. */
941 emit_use (hard_frame_pointer_rtx);
943 /* Mark the static chain as clobbered here so life information
944 doesn't get messed up for it. */
945 chain = rtx_for_static_chain (current_function_decl, true);
946 if (chain && REG_P (chain))
947 emit_clobber (chain);
949 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
951 /* If the argument pointer can be eliminated in favor of the
952 frame pointer, we don't need to restore it. We assume here
953 that if such an elimination is present, it can always be used.
954 This is the case on all known machines; if we don't make this
955 assumption, we do unnecessary saving on many machines. */
956 size_t i;
957 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
959 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
960 if (elim_regs[i].from == ARG_POINTER_REGNUM
961 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
962 break;
964 if (i == ARRAY_SIZE (elim_regs))
966 /* Now restore our arg pointer from the address at which it
967 was saved in our stack frame. */
968 emit_move_insn (crtl->args.internal_arg_pointer,
969 copy_to_reg (get_arg_pointer_save_area ()));
973 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
974 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
975 else if (targetm.have_nonlocal_goto_receiver ())
976 emit_insn (targetm.gen_nonlocal_goto_receiver ());
977 else
978 { /* Nothing */ }
980 /* We must not allow the code we just generated to be reordered by
981 scheduling. Specifically, the update of the frame pointer must
982 happen immediately, not later. */
983 emit_insn (gen_blockage ());
986 /* __builtin_longjmp is passed a pointer to an array of five words (not
987 all will be used on all machines). It operates similarly to the C
988 library function of the same name, but is more efficient. Much of
989 the code below is copied from the handling of non-local gotos. */
991 static void
992 expand_builtin_longjmp (rtx buf_addr, rtx value)
994 rtx fp, lab, stack;
995 rtx_insn *insn, *last;
996 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
998 /* DRAP is needed for stack realign if longjmp is expanded to current
999 function */
1000 if (SUPPORTS_STACK_ALIGNMENT)
1001 crtl->need_drap = true;
1003 if (setjmp_alias_set == -1)
1004 setjmp_alias_set = new_alias_set ();
1006 buf_addr = convert_memory_address (Pmode, buf_addr);
1008 buf_addr = force_reg (Pmode, buf_addr);
1010 /* We require the user to pass a second argument of 1, because
1011 that is what builtin_setjmp will return. */
1012 gcc_assert (value == const1_rtx);
1014 last = get_last_insn ();
1015 if (targetm.have_builtin_longjmp ())
1016 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1017 else
1019 fp = gen_rtx_MEM (Pmode, buf_addr);
1020 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1021 GET_MODE_SIZE (Pmode)));
1023 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1024 2 * GET_MODE_SIZE (Pmode)));
1025 set_mem_alias_set (fp, setjmp_alias_set);
1026 set_mem_alias_set (lab, setjmp_alias_set);
1027 set_mem_alias_set (stack, setjmp_alias_set);
1029 /* Pick up FP, label, and SP from the block and jump. This code is
1030 from expand_goto in stmt.cc; see there for detailed comments. */
1031 if (targetm.have_nonlocal_goto ())
1032 /* We have to pass a value to the nonlocal_goto pattern that will
1033 get copied into the static_chain pointer, but it does not matter
1034 what that value is, because builtin_setjmp does not use it. */
1035 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1036 else
1038 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1039 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1041 lab = copy_to_reg (lab);
1043 /* Restore the frame pointer and stack pointer. We must use a
1044 temporary since the setjmp buffer may be a local. */
1045 fp = copy_to_reg (fp);
1046 emit_stack_restore (SAVE_NONLOCAL, stack);
1048 /* Ensure the frame pointer move is not optimized. */
1049 emit_insn (gen_blockage ());
1050 emit_clobber (hard_frame_pointer_rtx);
1051 emit_clobber (frame_pointer_rtx);
1052 emit_move_insn (hard_frame_pointer_rtx, fp);
1054 emit_use (hard_frame_pointer_rtx);
1055 emit_use (stack_pointer_rtx);
1056 emit_indirect_jump (lab);
1060 /* Search backwards and mark the jump insn as a non-local goto.
1061 Note that this precludes the use of __builtin_longjmp to a
1062 __builtin_setjmp target in the same function. However, we've
1063 already cautioned the user that these functions are for
1064 internal exception handling use only. */
1065 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1067 gcc_assert (insn != last);
1069 if (JUMP_P (insn))
1071 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1072 break;
1074 else if (CALL_P (insn))
1075 break;
1079 static inline bool
1080 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1082 return (iter->i < iter->n);
1085 /* This function validates the types of a function call argument list
1086 against a specified list of tree_codes. If the last specifier is a 0,
1087 that represents an ellipsis, otherwise the last specifier must be a
1088 VOID_TYPE. */
1090 static bool
1091 validate_arglist (const_tree callexpr, ...)
1093 enum tree_code code;
1094 bool res = 0;
1095 va_list ap;
1096 const_call_expr_arg_iterator iter;
1097 const_tree arg;
1099 va_start (ap, callexpr);
1100 init_const_call_expr_arg_iterator (callexpr, &iter);
1102 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1103 tree fn = CALL_EXPR_FN (callexpr);
1104 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1106 for (unsigned argno = 1; ; ++argno)
1108 code = (enum tree_code) va_arg (ap, int);
1110 switch (code)
1112 case 0:
1113 /* This signifies an ellipsis; any further arguments are all ok. */
1114 res = true;
1115 goto end;
1116 case VOID_TYPE:
1117 /* This signifies an endlink, if no arguments remain, return
1118 true, otherwise return false. */
1119 res = !more_const_call_expr_args_p (&iter);
1120 goto end;
1121 case POINTER_TYPE:
1122 /* The actual argument must be nonnull when either the whole
1123 called function has been declared nonnull, or when the formal
1124 argument corresponding to the actual argument has been. */
1125 if (argmap
1126 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1128 arg = next_const_call_expr_arg (&iter);
1129 if (!validate_arg (arg, code) || integer_zerop (arg))
1130 goto end;
1131 break;
1133 /* FALLTHRU */
1134 default:
1135 /* If no parameters remain or the parameter's code does not
1136 match the specified code, return false. Otherwise continue
1137 checking any remaining arguments. */
1138 arg = next_const_call_expr_arg (&iter);
1139 if (!validate_arg (arg, code))
1140 goto end;
1141 break;
1145 /* We need gotos here since we can only have one VA_CLOSE in a
1146 function. */
1147 end: ;
1148 va_end (ap);
1150 BITMAP_FREE (argmap);
1152 return res;
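/* Typical uses, as in the expanders below:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments (rejecting literal nulls where
   attribute nonnull applies), while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by any number of further arguments.  */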
1155 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1156 and the address of the save area. */
1158 static rtx
1159 expand_builtin_nonlocal_goto (tree exp)
1161 tree t_label, t_save_area;
1162 rtx r_label, r_save_area, r_fp, r_sp;
1163 rtx_insn *insn;
1165 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1166 return NULL_RTX;
1168 t_label = CALL_EXPR_ARG (exp, 0);
1169 t_save_area = CALL_EXPR_ARG (exp, 1);
1171 r_label = expand_normal (t_label);
1172 r_label = convert_memory_address (Pmode, r_label);
1173 r_save_area = expand_normal (t_save_area);
1174 r_save_area = convert_memory_address (Pmode, r_save_area);
1175 /* Copy the address of the save location to a register just in case it was
1176 based on the frame pointer. */
1177 r_save_area = copy_to_reg (r_save_area);
1178 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1179 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1180 plus_constant (Pmode, r_save_area,
1181 GET_MODE_SIZE (Pmode)));
1183 crtl->has_nonlocal_goto = 1;
1185 /* ??? We no longer need to pass the static chain value, afaik. */
1186 if (targetm.have_nonlocal_goto ())
1187 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1188 else
1190 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1191 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1193 r_label = copy_to_reg (r_label);
1195 /* Restore the frame pointer and stack pointer. We must use a
1196 temporary since the setjmp buffer may be a local. */
1197 r_fp = copy_to_reg (r_fp);
1198 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1200 /* Ensure the frame pointer move is not optimized. */
1201 emit_insn (gen_blockage ());
1202 emit_clobber (hard_frame_pointer_rtx);
1203 emit_clobber (frame_pointer_rtx);
1204 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1206 /* USE of hard_frame_pointer_rtx added for consistency;
1207 not clear if really needed. */
1208 emit_use (hard_frame_pointer_rtx);
1209 emit_use (stack_pointer_rtx);
1211 /* If the architecture is using a GP register, we must
1212 conservatively assume that the target function makes use of it.
1213 The prologue of functions with nonlocal gotos must therefore
1214 initialize the GP register to the appropriate value, and we
1215 must then make sure that this value is live at the point
1216 of the jump. (Note that this doesn't necessarily apply
1217 to targets with a nonlocal_goto pattern; they are free
1218 to implement it in their own way. Note also that this is
1219 a no-op if the GP register is a global invariant.) */
1220 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1221 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1222 emit_use (pic_offset_table_rtx);
1224 emit_indirect_jump (r_label);
1227 /* Search backwards to the jump insn and mark it as a
1228 non-local goto. */
1229 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1231 if (JUMP_P (insn))
1233 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1234 break;
1236 else if (CALL_P (insn))
1237 break;
1240 return const0_rtx;
1243 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1244 (not all will be used on all machines) that was passed to __builtin_setjmp.
1245 It updates the stack pointer in that block to the current value. This is
1246 also called directly by the SJLJ exception handling code. */
1248 void
1249 expand_builtin_update_setjmp_buf (rtx buf_addr)
1251 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1252 buf_addr = convert_memory_address (Pmode, buf_addr);
1253 rtx stack_save
1254 = gen_rtx_MEM (sa_mode,
1255 memory_address
1256 (sa_mode,
1257 plus_constant (Pmode, buf_addr,
1258 2 * GET_MODE_SIZE (Pmode))));
1260 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1263 /* Expand a call to __builtin_prefetch. For a target that does not support
1264 data prefetch, evaluate the memory address argument in case it has side
1265 effects. */
1267 static void
1268 expand_builtin_prefetch (tree exp)
1270 tree arg0, arg1, arg2;
1271 int nargs;
1272 rtx op0, op1, op2;
1274 if (!validate_arglist (exp, POINTER_TYPE, 0))
1275 return;
1277 arg0 = CALL_EXPR_ARG (exp, 0);
1279 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1280 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1281 locality). */
1282 nargs = call_expr_nargs (exp);
1283 if (nargs > 1)
1284 arg1 = CALL_EXPR_ARG (exp, 1);
1285 else
1286 arg1 = integer_zero_node;
1287 if (nargs > 2)
1288 arg2 = CALL_EXPR_ARG (exp, 2);
1289 else
1290 arg2 = integer_three_node;
1292 /* Argument 0 is an address. */
1293 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1295 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1296 if (TREE_CODE (arg1) != INTEGER_CST)
1298 error ("second argument to %<__builtin_prefetch%> must be a constant");
1299 arg1 = integer_zero_node;
1301 op1 = expand_normal (arg1);
1302 /* Argument 1 must be either zero or one. */
1303 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1305 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1306 " using zero");
1307 op1 = const0_rtx;
1310 /* Argument 2 (locality) must be a compile-time constant int. */
1311 if (TREE_CODE (arg2) != INTEGER_CST)
1313 error ("third argument to %<__builtin_prefetch%> must be a constant");
1314 arg2 = integer_zero_node;
1316 op2 = expand_normal (arg2);
1317 /* Argument 2 must be 0, 1, 2, or 3. */
1318 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1320 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1321 op2 = const0_rtx;
1324 if (targetm.have_prefetch ())
1326 class expand_operand ops[3];
1328 create_address_operand (&ops[0], op0);
1329 create_integer_operand (&ops[1], INTVAL (op1));
1330 create_integer_operand (&ops[2], INTVAL (op2));
1331 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1332 return;
1335 /* Don't do anything with direct references to volatile memory, but
1336 generate code to handle other side effects. */
1337 if (!MEM_P (op0) && side_effects_p (op0))
1338 emit_insn (op0);
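/* The source-level forms being validated above are, for example:

     __builtin_prefetch (p);          /* read/write = 0, locality = 3 */
     __builtin_prefetch (p, 1, 0);    /* prefetch for write, no locality */

   The second and third arguments must be integer constants; invalid
   values are diagnosed and then replaced by zero rather than rejected
   outright.  */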
1341 /* Get a MEM rtx for expression EXP which is the address of an operand
1342 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1343 the maximum length of the block of memory that might be accessed or
1344 NULL if unknown. */
1346 static rtx
1347 get_memory_rtx (tree exp, tree len)
1349 tree orig_exp = exp, base;
1350 rtx addr, mem;
1352 gcc_checking_assert
1353 (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1355 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1356 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1357 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1358 exp = TREE_OPERAND (exp, 0);
1360 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1361 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1363 /* Get an expression we can use to find the attributes to assign to MEM.
1364 First remove any nops. */
1365 while (CONVERT_EXPR_P (exp)
1366 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1367 exp = TREE_OPERAND (exp, 0);
1369 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1370 (as builtin stringops may alias with anything). */
1371 exp = fold_build2 (MEM_REF,
1372 build_array_type (char_type_node,
1373 build_range_type (sizetype,
1374 size_one_node, len)),
1375 exp, build_int_cst (ptr_type_node, 0));
1377 /* If the MEM_REF has no acceptable address, try to get the base object
1378 from the original address we got, and build an all-aliasing
1379 unknown-sized access to that one. */
1380 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1381 set_mem_attributes (mem, exp, 0);
1382 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1383 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1384 0))))
1386 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1387 exp = build_fold_addr_expr (base);
1388 exp = fold_build2 (MEM_REF,
1389 build_array_type (char_type_node,
1390 build_range_type (sizetype,
1391 size_zero_node,
1392 NULL)),
1393 exp, build_int_cst (ptr_type_node, 0));
1394 set_mem_attributes (mem, exp, 0);
1395 /* Since we stripped parts make sure the offset is unknown and the
1396 alignment is computed from the original address. */
1397 clear_mem_offset (mem);
1398 set_mem_align (mem, align);
1400 set_mem_alias_set (mem, 0);
1401 return mem;
1404 /* Built-in functions to perform an untyped call and return. */
1406 /* Wrapper that implicitly applies a delta when getting or setting the
1407 enclosed value. */
1408 template <typename T>
1409 class delta_type
1411 T &value; T const delta;
1412 public:
1413 delta_type (T &val, T dlt) : value (val), delta (dlt) {}
1414 operator T () const { return value + delta; }
1415 T operator = (T val) const { value = val - delta; return val; }
1418 #define saved_apply_args_size \
1419 (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
1420 #define apply_args_mode \
1421 (this_target_builtins->x_apply_args_mode)
1422 #define saved_apply_result_size \
1423 (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
1424 #define apply_result_mode \
1425 (this_target_builtins->x_apply_result_mode)
1427 /* Return the size required for the block returned by __builtin_apply_args,
1428 and initialize apply_args_mode. */
1430 static int
1431 apply_args_size (void)
1433 int size = saved_apply_args_size;
1434 int align;
1435 unsigned int regno;
1437 /* The values computed by this function never change. */
1438 if (size < 0)
1440 /* The first value is the incoming arg-pointer. */
1441 size = GET_MODE_SIZE (Pmode);
1443 /* The second value is the structure value address unless this is
1444 passed as an "invisible" first argument. */
1445 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1446 size += GET_MODE_SIZE (Pmode);
1448 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1449 if (FUNCTION_ARG_REGNO_P (regno))
1451 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1453 if (mode != VOIDmode)
1455 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1456 if (size % align != 0)
1457 size = CEIL (size, align) * align;
1458 size += GET_MODE_SIZE (mode);
1459 apply_args_mode[regno] = mode;
1461 else
1462 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1464 else
1465 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1467 saved_apply_args_size = size;
1469 return size;
1472 /* Return the size required for the block returned by __builtin_apply,
1473 and initialize apply_result_mode. */
1475 static int
1476 apply_result_size (void)
1478 int size = saved_apply_result_size;
1479 int align, regno;
1481 /* The values computed by this function never change. */
1482 if (size < 0)
1484 size = 0;
1486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1487 if (targetm.calls.function_value_regno_p (regno))
1489 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1491 if (mode != VOIDmode)
1493 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1494 if (size % align != 0)
1495 size = CEIL (size, align) * align;
1496 size += GET_MODE_SIZE (mode);
1497 apply_result_mode[regno] = mode;
1499 else
1500 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1502 else
1503 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1505 /* Allow targets that use untyped_call and untyped_return to override
1506 the size so that machine-specific information can be stored here. */
1507 #ifdef APPLY_RESULT_SIZE
1508 size = APPLY_RESULT_SIZE;
1509 #endif
1511 saved_apply_result_size = size;
1513 return size;
1516 /* Create a vector describing the result block RESULT. If SAVEP is true,
1517 the result block is used to save the values; otherwise it is used to
1518 restore the values. */
1520 static rtx
1521 result_vector (int savep, rtx result)
1523 int regno, size, align, nelts;
1524 fixed_size_mode mode;
1525 rtx reg, mem;
1526 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1528 size = nelts = 0;
1529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1530 if ((mode = apply_result_mode[regno]) != VOIDmode)
1532 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1533 if (size % align != 0)
1534 size = CEIL (size, align) * align;
1535 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1536 mem = adjust_address (result, mode, size);
1537 savevec[nelts++] = (savep
1538 ? gen_rtx_SET (mem, reg)
1539 : gen_rtx_SET (reg, mem));
1540 size += GET_MODE_SIZE (mode);
1542 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1545 /* Save the state required to perform an untyped call with the same
1546 arguments as were passed to the current function. */
1548 static rtx
1549 expand_builtin_apply_args_1 (void)
1551 rtx registers, tem;
1552 int size, align, regno;
1553 fixed_size_mode mode;
1554 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1556 /* Create a block where the arg-pointer, structure value address,
1557 and argument registers can be saved. */
1558 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1560 /* Walk past the arg-pointer and structure value address. */
1561 size = GET_MODE_SIZE (Pmode);
1562 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1563 size += GET_MODE_SIZE (Pmode);
1565 /* Save each register used in calling a function to the block. */
1566 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1567 if ((mode = apply_args_mode[regno]) != VOIDmode)
1569 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1570 if (size % align != 0)
1571 size = CEIL (size, align) * align;
1573 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1575 emit_move_insn (adjust_address (registers, mode, size), tem);
1576 size += GET_MODE_SIZE (mode);
1579 /* Save the arg pointer to the block. */
1580 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1581 /* We need the pointer as the caller actually passed them to us, not
1582 as we might have pretended they were passed. Make sure it's a valid
1583 operand, as emit_move_insn isn't expected to handle a PLUS. */
1584 if (STACK_GROWS_DOWNWARD)
1585 tem
1586 = force_operand (plus_constant (Pmode, tem,
1587 crtl->args.pretend_args_size),
1588 NULL_RTX);
1589 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1591 size = GET_MODE_SIZE (Pmode);
1593 /* Save the structure value address unless this is passed as an
1594 "invisible" first argument. */
1595 if (struct_incoming_value)
1596 emit_move_insn (adjust_address (registers, Pmode, size),
1597 copy_to_reg (struct_incoming_value));
1599 /* Return the address of the block. */
1600 return copy_addr_to_reg (XEXP (registers, 0));
1603 /* __builtin_apply_args returns block of memory allocated on
1604 the stack into which is stored the arg pointer, structure
1605 value address, static chain, and all the registers that might
1606 possibly be used in performing a function call. The code is
1607 moved to the start of the function so the incoming values are
1608 saved. */
1610 static rtx
1611 expand_builtin_apply_args (void)
1613 /* Don't do __builtin_apply_args more than once in a function.
1614 Save the result of the first call and reuse it. */
1615 if (apply_args_value != 0)
1616 return apply_args_value;
1618 /* When this function is called, it means that registers must be
1619 saved on entry to this function. So we migrate the
1620 call to the first insn of this function. */
1621 rtx temp;
1623 start_sequence ();
1624 temp = expand_builtin_apply_args_1 ();
1625 rtx_insn *seq = get_insns ();
1626 end_sequence ();
1628 apply_args_value = temp;
1630 /* Put the insns after the NOTE that starts the function.
1631 If this is inside a start_sequence, make the outer-level insn
1632 chain current, so the code is placed at the start of the
1633 function. If internal_arg_pointer is a non-virtual pseudo,
1634 it needs to be placed after the function that initializes
1635 that pseudo. */
1636 push_topmost_sequence ();
1637 if (REG_P (crtl->args.internal_arg_pointer)
1638 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1639 emit_insn_before (seq, parm_birth_insn);
1640 else
1641 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1642 pop_topmost_sequence ();
1643 return temp;
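/* At the source level this machinery supports forwarding calls such as:

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);

   where the final argument is a caller-supplied upper bound on the size
   of the pushed-argument block.  */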
1647 /* Perform an untyped call and save the state required to perform an
1648 untyped return of whatever value was returned by the given function. */
1650 static rtx
1651 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1653 int size, align, regno;
1654 fixed_size_mode mode;
1655 rtx incoming_args, result, reg, dest, src;
1656 rtx_call_insn *call_insn;
1657 rtx old_stack_level = 0;
1658 rtx call_fusage = 0;
1659 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1661 arguments = convert_memory_address (Pmode, arguments);
1663 /* Create a block where the return registers can be saved. */
1664 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1666 /* Fetch the arg pointer from the ARGUMENTS block. */
1667 incoming_args = gen_reg_rtx (Pmode);
1668 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1669 if (!STACK_GROWS_DOWNWARD)
1670 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1671 incoming_args, 0, OPTAB_LIB_WIDEN);
1673 /* Push a new argument block and copy the arguments. Do not allow
1674 the (potential) memcpy call below to interfere with our stack
1675 manipulations. */
1676 do_pending_stack_adjust ();
1677 NO_DEFER_POP;
1679 /* Save the stack with nonlocal if available. */
1680 if (targetm.have_save_stack_nonlocal ())
1681 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1682 else
1683 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1685 /* Allocate a block of memory onto the stack and copy the memory
1686 arguments to the outgoing arguments address. We can pass TRUE
1687 as the last argument because we just saved the stack pointer
1688 and will restore it right after the call. */
1689 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1691 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1692 may have already set current_function_calls_alloca to true.
1693 current_function_calls_alloca won't be set if argsize is zero,
1694 so we have to guarantee need_drap is true here. */
1695 if (SUPPORTS_STACK_ALIGNMENT)
1696 crtl->need_drap = true;
1698 dest = virtual_outgoing_args_rtx;
1699 if (!STACK_GROWS_DOWNWARD)
1701 if (CONST_INT_P (argsize))
1702 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1703 else
1704 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1706 dest = gen_rtx_MEM (BLKmode, dest);
1707 set_mem_align (dest, PARM_BOUNDARY);
1708 src = gen_rtx_MEM (BLKmode, incoming_args);
1709 set_mem_align (src, PARM_BOUNDARY);
1710 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1712 /* Refer to the argument block. */
1713 apply_args_size ();
1714 arguments = gen_rtx_MEM (BLKmode, arguments);
1715 set_mem_align (arguments, PARM_BOUNDARY);
1717 /* Walk past the arg-pointer and structure value address. */
1718 size = GET_MODE_SIZE (Pmode);
1719 if (struct_value)
1720 size += GET_MODE_SIZE (Pmode);
1722 /* Restore each of the registers previously saved. Make USE insns
1723 for each of these registers for use in making the call. */
1724 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1725 if ((mode = apply_args_mode[regno]) != VOIDmode)
1727 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1728 if (size % align != 0)
1729 size = CEIL (size, align) * align;
1730 reg = gen_rtx_REG (mode, regno);
1731 emit_move_insn (reg, adjust_address (arguments, mode, size));
1732 use_reg (&call_fusage, reg);
1733 size += GET_MODE_SIZE (mode);
1736 /* Restore the structure value address unless this is passed as an
1737 "invisible" first argument. */
1738 size = GET_MODE_SIZE (Pmode);
1739 if (struct_value)
1741 rtx value = gen_reg_rtx (Pmode);
1742 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1743 emit_move_insn (struct_value, value);
1744 if (REG_P (struct_value))
1745 use_reg (&call_fusage, struct_value);
1748 /* All arguments and registers used for the call are set up by now! */
1749 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1751 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1752 and we don't want to load it into a register as an optimization,
1753 because prepare_call_address already did it if it should be done. */
1754 if (GET_CODE (function) != SYMBOL_REF)
1755 function = memory_address (FUNCTION_MODE, function);
1757 /* Generate the actual call instruction and save the return value. */
1758 if (targetm.have_untyped_call ())
1760 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1761 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1762 result_vector (1, result));
1763 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1764 if (CALL_P (insn))
1765 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1766 emit_insn (seq);
1768 else if (targetm.have_call_value ())
1770 rtx valreg = 0;
1772 /* Locate the unique return register. It is not possible to
1773 express a call that sets more than one return register using
1774 call_value; use untyped_call for that. In fact, untyped_call
1775 only needs to save the return registers in the given block. */
1776 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1777 if ((mode = apply_result_mode[regno]) != VOIDmode)
1779 gcc_assert (!valreg); /* have_untyped_call required. */
1781 valreg = gen_rtx_REG (mode, regno);
1784 emit_insn (targetm.gen_call_value (valreg,
1785 gen_rtx_MEM (FUNCTION_MODE, function),
1786 const0_rtx, NULL_RTX, const0_rtx));
1788 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1790 else
1791 gcc_unreachable ();
1793 /* Find the CALL insn we just emitted, and attach the register usage
1794 information. */
1795 call_insn = last_call_insn ();
1796 add_function_usage_to (call_insn, call_fusage);
1798 /* Restore the stack. */
1799 if (targetm.have_save_stack_nonlocal ())
1800 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1801 else
1802 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1803 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1805 OK_DEFER_POP;
1807 /* Return the address of the result block. */
1808 result = copy_addr_to_reg (XEXP (result, 0));
1809 return convert_memory_address (ptr_mode, result);
1812 /* Perform an untyped return. */
1814 static void
1815 expand_builtin_return (rtx result)
1817 int size, align, regno;
1818 fixed_size_mode mode;
1819 rtx reg;
1820 rtx_insn *call_fusage = 0;
1822 result = convert_memory_address (Pmode, result);
1824 apply_result_size ();
1825 result = gen_rtx_MEM (BLKmode, result);
1827 if (targetm.have_untyped_return ())
1829 rtx vector = result_vector (0, result);
1830 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1831 emit_barrier ();
1832 return;
1835 /* Restore the return value and note that each value is used. */
1836 size = 0;
1837 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1838 if ((mode = apply_result_mode[regno]) != VOIDmode)
1840 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1841 if (size % align != 0)
1842 size = CEIL (size, align) * align;
1843 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1844 emit_move_insn (reg, adjust_address (result, mode, size));
1846 push_to_sequence (call_fusage);
1847 emit_use (reg);
1848 call_fusage = get_insns ();
1849 end_sequence ();
1850 size += GET_MODE_SIZE (mode);
1853 /* Put the USE insns before the return. */
1854 emit_insn (call_fusage);
1856 /* Return whatever values were restored by jumping directly to the end
1857 of the function. */
1858 expand_naked_return ();
1861 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1864 type_to_class (tree type)
1866 switch (TREE_CODE (type))
1868 case VOID_TYPE: return void_type_class;
1869 case INTEGER_TYPE: return integer_type_class;
1870 case ENUMERAL_TYPE: return enumeral_type_class;
1871 case BOOLEAN_TYPE: return boolean_type_class;
1872 case POINTER_TYPE: return pointer_type_class;
1873 case REFERENCE_TYPE: return reference_type_class;
1874 case OFFSET_TYPE: return offset_type_class;
1875 case REAL_TYPE: return real_type_class;
1876 case COMPLEX_TYPE: return complex_type_class;
1877 case FUNCTION_TYPE: return function_type_class;
1878 case METHOD_TYPE: return method_type_class;
1879 case RECORD_TYPE: return record_type_class;
1880 case UNION_TYPE:
1881 case QUAL_UNION_TYPE: return union_type_class;
1882 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1883 ? string_type_class : array_type_class);
1884 case LANG_TYPE: return lang_type_class;
1885 case OPAQUE_TYPE: return opaque_type_class;
1886 case BITINT_TYPE: return bitint_type_class;
1887 case VECTOR_TYPE: return vector_type_class;
1888 default: return no_type_class;
1892 /* Expand a call EXP to __builtin_classify_type. */
1894 static rtx
1895 expand_builtin_classify_type (tree exp)
1897 if (call_expr_nargs (exp))
1898 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1899 return GEN_INT (no_type_class);
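/* For example, given the switch above, __builtin_classify_type (1.5)
   expands to the constant real_type_class, __builtin_classify_type
   ((void *) 0) to pointer_type_class, and a call with no arguments
   yields no_type_class.  */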
1902 /* This helper macro, meant to be used in mathfn_built_in below, determines
1903 which among a set of builtin math functions is appropriate for a given type
1904 mode. The `F' (float) and `L' (long double) are automatically generated
1905 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1906 types, there are additional types that are considered with 'F32', 'F64',
1907 'F128', etc. suffixes. */
1908 #define CASE_MATHFN(MATHFN) \
1909 CASE_CFN_##MATHFN: \
1910 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1911 fcodel = BUILT_IN_##MATHFN##L ; break;
1912 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1913 types. */
1914 #define CASE_MATHFN_FLOATN(MATHFN) \
1915 CASE_CFN_##MATHFN: \
1916 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1917 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1918 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1919 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1920 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1921 break;
1922 /* Similar to above, but appends _R after any F/L suffix. */
1923 #define CASE_MATHFN_REENT(MATHFN) \
1924 case CFN_BUILT_IN_##MATHFN##_R: \
1925 case CFN_BUILT_IN_##MATHFN##F_R: \
1926 case CFN_BUILT_IN_##MATHFN##L_R: \
1927 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1928 fcodel = BUILT_IN_##MATHFN##L_R ; break;
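/* As a concrete example, CASE_MATHFN (EXP10) above expands into the
   CASE_CFN_EXP10 case label followed by

     fcode = BUILT_IN_EXP10; fcodef = BUILT_IN_EXP10F;
     fcodel = BUILT_IN_EXP10L; break;

   while CASE_MATHFN_FLOATN (SQRT) additionally fills in fcodef16,
   fcodef32, ... from BUILT_IN_SQRTF16, BUILT_IN_SQRTF32 and so on.  */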
1930 /* Return a function equivalent to FN but operating on floating-point
1931 values of type TYPE, or END_BUILTINS if no such function exists.
1932 This is purely an operation on function codes; it does not guarantee
1933 that the target actually has an implementation of the function. */
1935 static built_in_function
1936 mathfn_built_in_2 (tree type, combined_fn fn)
1938 tree mtype;
1939 built_in_function fcode, fcodef, fcodel;
1940 built_in_function fcodef16 = END_BUILTINS;
1941 built_in_function fcodef32 = END_BUILTINS;
1942 built_in_function fcodef64 = END_BUILTINS;
1943 built_in_function fcodef128 = END_BUILTINS;
1944 built_in_function fcodef32x = END_BUILTINS;
1945 built_in_function fcodef64x = END_BUILTINS;
1946 built_in_function fcodef128x = END_BUILTINS;
1948 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1949 break the uses below. */
1950 #undef HUGE_VAL
1951 #undef NAN
1953 switch (fn)
1955 #define SEQ_OF_CASE_MATHFN \
1956 CASE_MATHFN_FLOATN (ACOS) \
1957 CASE_MATHFN_FLOATN (ACOSH) \
1958 CASE_MATHFN_FLOATN (ASIN) \
1959 CASE_MATHFN_FLOATN (ASINH) \
1960 CASE_MATHFN_FLOATN (ATAN) \
1961 CASE_MATHFN_FLOATN (ATAN2) \
1962 CASE_MATHFN_FLOATN (ATANH) \
1963 CASE_MATHFN_FLOATN (CBRT) \
1964 CASE_MATHFN_FLOATN (CEIL) \
1965 CASE_MATHFN (CEXPI) \
1966 CASE_MATHFN_FLOATN (COPYSIGN) \
1967 CASE_MATHFN_FLOATN (COS) \
1968 CASE_MATHFN_FLOATN (COSH) \
1969 CASE_MATHFN (DREM) \
1970 CASE_MATHFN_FLOATN (ERF) \
1971 CASE_MATHFN_FLOATN (ERFC) \
1972 CASE_MATHFN_FLOATN (EXP) \
1973 CASE_MATHFN (EXP10) \
1974 CASE_MATHFN_FLOATN (EXP2) \
1975 CASE_MATHFN_FLOATN (EXPM1) \
1976 CASE_MATHFN_FLOATN (FABS) \
1977 CASE_MATHFN_FLOATN (FDIM) \
1978 CASE_MATHFN_FLOATN (FLOOR) \
1979 CASE_MATHFN_FLOATN (FMA) \
1980 CASE_MATHFN_FLOATN (FMAX) \
1981 CASE_MATHFN_FLOATN (FMIN) \
1982 CASE_MATHFN_FLOATN (FMOD) \
1983 CASE_MATHFN_FLOATN (FREXP) \
1984 CASE_MATHFN (GAMMA) \
1985 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1986 CASE_MATHFN_FLOATN (HUGE_VAL) \
1987 CASE_MATHFN_FLOATN (HYPOT) \
1988 CASE_MATHFN_FLOATN (ILOGB) \
1989 CASE_MATHFN (ICEIL) \
1990 CASE_MATHFN (IFLOOR) \
1991 CASE_MATHFN_FLOATN (INF) \
1992 CASE_MATHFN (IRINT) \
1993 CASE_MATHFN (IROUND) \
1994 CASE_MATHFN (ISINF) \
1995 CASE_MATHFN (J0) \
1996 CASE_MATHFN (J1) \
1997 CASE_MATHFN (JN) \
1998 CASE_MATHFN (LCEIL) \
1999 CASE_MATHFN_FLOATN (LDEXP) \
2000 CASE_MATHFN (LFLOOR) \
2001 CASE_MATHFN_FLOATN (LGAMMA) \
2002 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2003 CASE_MATHFN (LLCEIL) \
2004 CASE_MATHFN (LLFLOOR) \
2005 CASE_MATHFN_FLOATN (LLRINT) \
2006 CASE_MATHFN_FLOATN (LLROUND) \
2007 CASE_MATHFN_FLOATN (LOG) \
2008 CASE_MATHFN_FLOATN (LOG10) \
2009 CASE_MATHFN_FLOATN (LOG1P) \
2010 CASE_MATHFN_FLOATN (LOG2) \
2011 CASE_MATHFN_FLOATN (LOGB) \
2012 CASE_MATHFN_FLOATN (LRINT) \
2013 CASE_MATHFN_FLOATN (LROUND) \
2014 CASE_MATHFN_FLOATN (MODF) \
2015 CASE_MATHFN_FLOATN (NAN) \
2016 CASE_MATHFN_FLOATN (NANS) \
2017 CASE_MATHFN_FLOATN (NEARBYINT) \
2018 CASE_MATHFN_FLOATN (NEXTAFTER) \
2019 CASE_MATHFN (NEXTTOWARD) \
2020 CASE_MATHFN_FLOATN (POW) \
2021 CASE_MATHFN (POWI) \
2022 CASE_MATHFN (POW10) \
2023 CASE_MATHFN_FLOATN (REMAINDER) \
2024 CASE_MATHFN_FLOATN (REMQUO) \
2025 CASE_MATHFN_FLOATN (RINT) \
2026 CASE_MATHFN_FLOATN (ROUND) \
2027 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2028 CASE_MATHFN (SCALB) \
2029 CASE_MATHFN_FLOATN (SCALBLN) \
2030 CASE_MATHFN_FLOATN (SCALBN) \
2031 CASE_MATHFN (SIGNBIT) \
2032 CASE_MATHFN (SIGNIFICAND) \
2033 CASE_MATHFN_FLOATN (SIN) \
2034 CASE_MATHFN (SINCOS) \
2035 CASE_MATHFN_FLOATN (SINH) \
2036 CASE_MATHFN_FLOATN (SQRT) \
2037 CASE_MATHFN_FLOATN (TAN) \
2038 CASE_MATHFN_FLOATN (TANH) \
2039 CASE_MATHFN_FLOATN (TGAMMA) \
2040 CASE_MATHFN_FLOATN (TRUNC) \
2041 CASE_MATHFN (Y0) \
2042 CASE_MATHFN (Y1) \
2043 CASE_MATHFN (YN)
2045 SEQ_OF_CASE_MATHFN
2047 default:
2048 return END_BUILTINS;
2051 mtype = TYPE_MAIN_VARIANT (type);
2052 if (mtype == double_type_node)
2053 return fcode;
2054 else if (mtype == float_type_node)
2055 return fcodef;
2056 else if (mtype == long_double_type_node)
2057 return fcodel;
2058 else if (mtype == float16_type_node)
2059 return fcodef16;
2060 else if (mtype == float32_type_node)
2061 return fcodef32;
2062 else if (mtype == float64_type_node)
2063 return fcodef64;
2064 else if (mtype == float128_type_node)
2065 return fcodef128;
2066 else if (mtype == float32x_type_node)
2067 return fcodef32x;
2068 else if (mtype == float64x_type_node)
2069 return fcodef64x;
2070 else if (mtype == float128x_type_node)
2071 return fcodef128x;
2072 else
2073 return END_BUILTINS;
2076 #undef CASE_MATHFN
2077 #undef CASE_MATHFN_FLOATN
2078 #undef CASE_MATHFN_REENT
2080 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2081 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2082 otherwise use the explicit declaration. If we can't do the conversion,
2083 return null. */
2085 static tree
2086 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2088 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2089 if (fcode2 == END_BUILTINS)
2090 return NULL_TREE;
2092 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2093 return NULL_TREE;
2095 return builtin_decl_explicit (fcode2);
2098 /* Like mathfn_built_in_1, but always use the implicit array. */
2100 tree
2101 mathfn_built_in (tree type, combined_fn fn)
2103 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2106 /* Like mathfn_built_in_1, but always use the explicit array. */
2108 tree
2109 mathfn_built_in_explicit (tree type, combined_fn fn)
2111 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2114 /* Like mathfn_built_in_1, but take a built_in_function and
2115 always use the implicit array. */
2117 tree
2118 mathfn_built_in (tree type, enum built_in_function fn)
2120 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
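/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) maps sin
   to BUILT_IN_SINF and returns its declaration, provided the implicit
   builtin is available; otherwise the result is NULL_TREE.  */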
2123 /* Return the type associated with a built in function, i.e., the one
2124 to be passed to mathfn_built_in to get the type-specific
2125 function. */
2127 tree
2128 mathfn_built_in_type (combined_fn fn)
2130 #define CASE_MATHFN(MATHFN) \
2131 case CFN_BUILT_IN_##MATHFN: \
2132 return double_type_node; \
2133 case CFN_BUILT_IN_##MATHFN##F: \
2134 return float_type_node; \
2135 case CFN_BUILT_IN_##MATHFN##L: \
2136 return long_double_type_node;
2138 #define CASE_MATHFN_FLOATN(MATHFN) \
2139 CASE_MATHFN(MATHFN) \
2140 case CFN_BUILT_IN_##MATHFN##F16: \
2141 return float16_type_node; \
2142 case CFN_BUILT_IN_##MATHFN##F32: \
2143 return float32_type_node; \
2144 case CFN_BUILT_IN_##MATHFN##F64: \
2145 return float64_type_node; \
2146 case CFN_BUILT_IN_##MATHFN##F128: \
2147 return float128_type_node; \
2148 case CFN_BUILT_IN_##MATHFN##F32X: \
2149 return float32x_type_node; \
2150 case CFN_BUILT_IN_##MATHFN##F64X: \
2151 return float64x_type_node; \
2152 case CFN_BUILT_IN_##MATHFN##F128X: \
2153 return float128x_type_node;
2155 /* Similar to above, but appends _R after any F/L suffix. */
2156 #define CASE_MATHFN_REENT(MATHFN) \
2157 case CFN_BUILT_IN_##MATHFN##_R: \
2158 return double_type_node; \
2159 case CFN_BUILT_IN_##MATHFN##F_R: \
2160 return float_type_node; \
2161 case CFN_BUILT_IN_##MATHFN##L_R: \
2162 return long_double_type_node;
2164 switch (fn)
2166 SEQ_OF_CASE_MATHFN
2168 default:
2169 return NULL_TREE;
2172 #undef CASE_MATHFN
2173 #undef CASE_MATHFN_FLOATN
2174 #undef CASE_MATHFN_REENT
2175 #undef SEQ_OF_CASE_MATHFN
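/* E.g. mathfn_built_in_type (CFN_BUILT_IN_SQRTL) returns
   long_double_type_node; feeding that type back into mathfn_built_in
   with another function code is a convenient way to look up a builtin
   of the same precision.  */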
2178 /* Check whether there is an internal function associated with function FN
2179 and return type RETURN_TYPE. Return the function if so, otherwise return
2180 IFN_LAST.
2182 Note that this function only tests whether the function is defined in
2183 internals.def, not whether it is actually available on the target. */
2185 static internal_fn
2186 associated_internal_fn (built_in_function fn, tree return_type)
2188 switch (fn)
2190 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2191 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2192 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2193 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2194 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2195 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2196 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2197 #include "internal-fn.def"
2199 CASE_FLT_FN (BUILT_IN_POW10):
2200 return IFN_EXP10;
2202 CASE_FLT_FN (BUILT_IN_DREM):
2203 return IFN_REMAINDER;
2205 CASE_FLT_FN (BUILT_IN_SCALBN):
2206 CASE_FLT_FN (BUILT_IN_SCALBLN):
2207 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2208 return IFN_LDEXP;
2209 return IFN_LAST;
2211 default:
2212 return IFN_LAST;
2216 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2217 return its code, otherwise return IFN_LAST. Note that this function
2218 only tests whether the function is defined in internals.def, not whether
2219 it is actually available on the target. */
2221 internal_fn
2222 associated_internal_fn (tree fndecl)
2224 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2225 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2226 TREE_TYPE (TREE_TYPE (fndecl)));
2229 /* Check whether there is an internal function associated with function CFN
2230 and return type RETURN_TYPE. Return the function if so, otherwise return
2231 IFN_LAST.
2233 Note that this function only tests whether the function is defined in
2234 internals.def, not whether it is actually available on the target. */
2236 internal_fn
2237 associated_internal_fn (combined_fn cfn, tree return_type)
2239 if (internal_fn_p (cfn))
2240 return as_internal_fn (cfn);
2241 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2244 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2245 on the current target by a call to an internal function, return the
2246 code of that internal function, otherwise return IFN_LAST. The caller
2247 is responsible for ensuring that any side-effects of the built-in
2248 call are dealt with correctly. E.g. if CALL sets errno, the caller
2249 must decide that the errno result isn't needed or make it available
2250 in some other way. */
2252 internal_fn
2253 replacement_internal_fn (gcall *call)
2255 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2257 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2258 if (ifn != IFN_LAST)
2260 tree_pair types = direct_internal_fn_types (ifn, call);
2261 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2262 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2263 return ifn;
2266 return IFN_LAST;
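/* For instance, a GIMPLE call to __builtin_sqrtf is reported here as
   replaceable by IFN_SQRT when the target provides a sqrt optab handler
   for SFmode; as noted above, the caller still has to account for the
   errno side-effect of the library function.  */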
2269 /* Expand a call to the builtin trinary math functions (fma).
2270 Return NULL_RTX if a normal call should be emitted rather than expanding the
2271 function in-line. EXP is the expression that is a call to the builtin
2272 function; if convenient, the result should be placed in TARGET.
2273 SUBTARGET may be used as the target for computing one of EXP's
2274 operands. */
2276 static rtx
2277 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2279 optab builtin_optab;
2280 rtx op0, op1, op2, result;
2281 rtx_insn *insns;
2282 tree fndecl = get_callee_fndecl (exp);
2283 tree arg0, arg1, arg2;
2284 machine_mode mode;
2286 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2287 return NULL_RTX;
2289 arg0 = CALL_EXPR_ARG (exp, 0);
2290 arg1 = CALL_EXPR_ARG (exp, 1);
2291 arg2 = CALL_EXPR_ARG (exp, 2);
2293 switch (DECL_FUNCTION_CODE (fndecl))
2295 CASE_FLT_FN (BUILT_IN_FMA):
2296 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2297 builtin_optab = fma_optab; break;
2298 default:
2299 gcc_unreachable ();
2302 /* Make a suitable register to place result in. */
2303 mode = TYPE_MODE (TREE_TYPE (exp));
2305 /* Before working hard, check whether the instruction is available. */
2306 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2307 return NULL_RTX;
2309 result = gen_reg_rtx (mode);
2311 /* Always stabilize the argument list. */
2312 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2313 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2314 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2316 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2317 op1 = expand_normal (arg1);
2318 op2 = expand_normal (arg2);
2320 start_sequence ();
2322 /* Compute into RESULT.
2323 Set RESULT to wherever the result comes back. */
2324 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2325 result, 0);
2327 /* If we were unable to expand via the builtin, stop the sequence
2328 (without outputting the insns) and call to the library function
2329 with the stabilized argument list. */
2330 if (result == 0)
2332 end_sequence ();
2333 return expand_call (exp, target, target == const0_rtx);
2336 /* Output the entire sequence. */
2337 insns = get_insns ();
2338 end_sequence ();
2339 emit_insn (insns);
2341 return result;
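/* For instance, __builtin_fmaf (a, b, c) is expanded through fma_optab
   into the target's fused multiply-add pattern when one exists for
   SFmode; otherwise the sequence is discarded and a library call to
   fmaf is emitted with the SAVE_EXPR-stabilized arguments.  */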
2344 /* Expand a call to the builtin sin and cos math functions.
2345 Return NULL_RTX if a normal call should be emitted rather than expanding the
2346 function in-line. EXP is the expression that is a call to the builtin
2347 function; if convenient, the result should be placed in TARGET.
2348 SUBTARGET may be used as the target for computing one of EXP's
2349 operands. */
2351 static rtx
2352 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2354 optab builtin_optab;
2355 rtx op0;
2356 rtx_insn *insns;
2357 tree fndecl = get_callee_fndecl (exp);
2358 machine_mode mode;
2359 tree arg;
2361 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2362 return NULL_RTX;
2364 arg = CALL_EXPR_ARG (exp, 0);
2366 switch (DECL_FUNCTION_CODE (fndecl))
2368 CASE_FLT_FN (BUILT_IN_SIN):
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = sincos_optab; break;
2371 default:
2372 gcc_unreachable ();
2375 /* Make a suitable register to place result in. */
2376 mode = TYPE_MODE (TREE_TYPE (exp));
2378 /* Check if sincos insn is available, otherwise fallback
2379 to sin or cos insn. */
2380 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2381 switch (DECL_FUNCTION_CODE (fndecl))
2383 CASE_FLT_FN (BUILT_IN_SIN):
2384 builtin_optab = sin_optab; break;
2385 CASE_FLT_FN (BUILT_IN_COS):
2386 builtin_optab = cos_optab; break;
2387 default:
2388 gcc_unreachable ();
2391 /* Before working hard, check whether the instruction is available. */
2392 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2394 rtx result = gen_reg_rtx (mode);
2396 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2397 need to expand the argument again. This way, we will not perform
2398 side-effects more than once. */
2399 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2401 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2403 start_sequence ();
2405 /* Compute into RESULT.
2406 Set RESULT to wherever the result comes back. */
2407 if (builtin_optab == sincos_optab)
2409 int ok;
2411 switch (DECL_FUNCTION_CODE (fndecl))
2413 CASE_FLT_FN (BUILT_IN_SIN):
2414 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2415 break;
2416 CASE_FLT_FN (BUILT_IN_COS):
2417 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2418 break;
2419 default:
2420 gcc_unreachable ();
2422 gcc_assert (ok);
2424 else
2425 result = expand_unop (mode, builtin_optab, op0, result, 0);
2427 if (result != 0)
2429 /* Output the entire sequence. */
2430 insns = get_insns ();
2431 end_sequence ();
2432 emit_insn (insns);
2433 return result;
2436 /* If we were unable to expand via the builtin, stop the sequence
2437 (without outputting the insns) and call to the library function
2438 with the stabilized argument list. */
2439 end_sequence ();
2442 return expand_call (exp, target, target == const0_rtx);
2445 /* Given an interclass math builtin decl FNDECL and its argument ARG
2446 return an RTL instruction code that implements the functionality.
2447 If that isn't possible or available return CODE_FOR_nothing. */
2449 static enum insn_code
2450 interclass_mathfn_icode (tree arg, tree fndecl)
2452 bool errno_set = false;
2453 optab builtin_optab = unknown_optab;
2454 machine_mode mode;
2456 switch (DECL_FUNCTION_CODE (fndecl))
2458 CASE_FLT_FN (BUILT_IN_ILOGB):
2459 errno_set = true; builtin_optab = ilogb_optab; break;
2460 CASE_FLT_FN (BUILT_IN_ISINF):
2461 builtin_optab = isinf_optab; break;
2462 case BUILT_IN_ISNORMAL:
2463 case BUILT_IN_ISFINITE:
2464 CASE_FLT_FN (BUILT_IN_FINITE):
2465 case BUILT_IN_FINITED32:
2466 case BUILT_IN_FINITED64:
2467 case BUILT_IN_FINITED128:
2468 case BUILT_IN_ISINFD32:
2469 case BUILT_IN_ISINFD64:
2470 case BUILT_IN_ISINFD128:
2471 /* These builtins have no optabs (yet). */
2472 break;
2473 default:
2474 gcc_unreachable ();
2477 /* There's no easy way to detect the case we need to set EDOM. */
2478 if (flag_errno_math && errno_set)
2479 return CODE_FOR_nothing;
2481 /* Optab mode depends on the mode of the input argument. */
2482 mode = TYPE_MODE (TREE_TYPE (arg));
2484 if (builtin_optab)
2485 return optab_handler (builtin_optab, mode);
2486 return CODE_FOR_nothing;
2489 /* Expand a call to one of the builtin math functions that operate on
2490 a floating-point argument and output an integer result (ilogb, isinf,
2491 isnan, etc.).
2492 Return 0 if a normal call should be emitted rather than expanding the
2493 function in-line. EXP is the expression that is a call to the builtin
2494 function; if convenient, the result should be placed in TARGET. */
2496 static rtx
2497 expand_builtin_interclass_mathfn (tree exp, rtx target)
2499 enum insn_code icode = CODE_FOR_nothing;
2500 rtx op0;
2501 tree fndecl = get_callee_fndecl (exp);
2502 machine_mode mode;
2503 tree arg;
2505 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2506 return NULL_RTX;
2508 arg = CALL_EXPR_ARG (exp, 0);
2509 icode = interclass_mathfn_icode (arg, fndecl);
2510 mode = TYPE_MODE (TREE_TYPE (arg));
2512 if (icode != CODE_FOR_nothing)
2514 class expand_operand ops[1];
2515 rtx_insn *last = get_last_insn ();
2516 tree orig_arg = arg;
2518 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2519 need to expand the argument again. This way, we will not perform
2520 side-effects more than once. */
2521 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2523 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2525 if (mode != GET_MODE (op0))
2526 op0 = convert_to_mode (mode, op0, 0);
2528 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2529 if (maybe_legitimize_operands (icode, 0, 1, ops)
2530 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2531 return ops[0].value;
2533 delete_insns_since (last);
2534 CALL_EXPR_ARG (exp, 0) = orig_arg;
2537 return NULL_RTX;
2540 /* Expand a call to the builtin sincos math function.
2541 Return NULL_RTX if a normal call should be emitted rather than expanding the
2542 function in-line. EXP is the expression that is a call to the builtin
2543 function. */
2545 static rtx
2546 expand_builtin_sincos (tree exp)
2548 rtx op0, op1, op2, target1, target2;
2549 machine_mode mode;
2550 tree arg, sinp, cosp;
2551 int result;
2552 location_t loc = EXPR_LOCATION (exp);
2553 tree alias_type, alias_off;
2555 if (!validate_arglist (exp, REAL_TYPE,
2556 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2557 return NULL_RTX;
2559 arg = CALL_EXPR_ARG (exp, 0);
2560 sinp = CALL_EXPR_ARG (exp, 1);
2561 cosp = CALL_EXPR_ARG (exp, 2);
2563 /* Make a suitable register to place result in. */
2564 mode = TYPE_MODE (TREE_TYPE (arg));
2566 /* Check if sincos insn is available, otherwise emit the call. */
2567 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2568 return NULL_RTX;
2570 target1 = gen_reg_rtx (mode);
2571 target2 = gen_reg_rtx (mode);
2573 op0 = expand_normal (arg);
2574 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2575 alias_off = build_int_cst (alias_type, 0);
2576 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2577 sinp, alias_off));
2578 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2579 cosp, alias_off));
2581 /* Compute into target1 and target2.
2582 Set TARGET to wherever the result comes back. */
2583 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2584 gcc_assert (result);
2586 /* Move target1 and target2 to the memory locations indicated
2587 by op1 and op2. */
2588 emit_move_insn (op1, target1);
2589 emit_move_insn (op2, target2);
2591 return const0_rtx;
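/* E.g. sincos (x, &s, &c) is expanded here into one sincos instruction
   for the mode of X, storing both results through the SINP and COSP
   pointers; without a sincos optab handler NULL_RTX is returned and an
   ordinary library call is emitted instead.  */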
2594 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2595 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2596 static rtx
2597 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2599 if (!validate_arglist (exp, VOID_TYPE))
2600 return NULL_RTX;
2602 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2603 if (icode == CODE_FOR_nothing)
2604 return NULL_RTX;
2606 if (target == 0
2607 || GET_MODE (target) != target_mode
2608 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2609 target = gen_reg_rtx (target_mode);
2611 rtx pat = GEN_FCN (icode) (target);
2612 if (!pat)
2613 return NULL_RTX;
2614 emit_insn (pat);
2616 return target;
2619 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2620 fenv.h), returning the result and setting it in TARGET. Otherwise return
2621 NULL_RTX on failure. */
2622 static rtx
2623 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2624 machine_mode target_mode, optab op_optab)
2626 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2627 return NULL_RTX;
2628 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2630 insn_code icode = direct_optab_handler (op_optab, SImode);
2631 if (icode == CODE_FOR_nothing)
2632 return NULL_RTX;
2634 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2635 return NULL_RTX;
2637 if (target == 0
2638 || GET_MODE (target) != target_mode
2639 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2640 target = gen_reg_rtx (target_mode);
2642 rtx pat = GEN_FCN (icode) (target, op0);
2643 if (!pat)
2644 return NULL_RTX;
2645 emit_insn (pat);
2647 return target;
2650 /* Expand a call to the internal cexpi builtin to the sincos math function.
2651 EXP is the expression that is a call to the builtin function; if convenient,
2652 the result should be placed in TARGET. */
2654 static rtx
2655 expand_builtin_cexpi (tree exp, rtx target)
2657 tree fndecl = get_callee_fndecl (exp);
2658 tree arg, type;
2659 machine_mode mode;
2660 rtx op0, op1, op2;
2661 location_t loc = EXPR_LOCATION (exp);
2663 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2664 return NULL_RTX;
2666 arg = CALL_EXPR_ARG (exp, 0);
2667 type = TREE_TYPE (arg);
2668 mode = TYPE_MODE (TREE_TYPE (arg));
2670 /* Try expanding via a sincos optab, fall back to emitting a libcall
2671 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2672 is only generated from sincos or cexp, or when either is known to be available. */
2673 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2675 op1 = gen_reg_rtx (mode);
2676 op2 = gen_reg_rtx (mode);
2678 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2680 /* Compute into op1 and op2. */
2681 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2683 else if (targetm.libc_has_function (function_sincos, type))
2685 tree call, fn = NULL_TREE;
2686 tree top1, top2;
2687 rtx op1a, op2a;
2689 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2690 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2691 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2692 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2693 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2694 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2695 else
2696 gcc_unreachable ();
2698 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2699 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2700 op1a = copy_addr_to_reg (XEXP (op1, 0));
2701 op2a = copy_addr_to_reg (XEXP (op2, 0));
2702 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2703 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2705 /* Make sure not to fold the sincos call again. */
2706 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2708 call, 3, arg, top1, top2));
2710 else
2712 tree call, fn = NULL_TREE, narg;
2713 tree ctype = build_complex_type (type);
2715 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2716 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2717 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2718 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2719 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2720 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2721 else
2722 gcc_unreachable ();
2724 /* If we don't have a decl for cexp create one. This is the
2725 friendliest fallback if the user calls __builtin_cexpi
2726 without full C99 function support on the target. */
2727 if (fn == NULL_TREE)
2729 tree fntype;
2730 const char *name = NULL;
2732 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2733 name = "cexpf";
2734 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2735 name = "cexp";
2736 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2737 name = "cexpl";
2739 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2740 fn = build_fn_decl (name, fntype);
2743 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2744 build_real (type, dconst0), arg);
2746 /* Make sure not to fold the cexp call again. */
2747 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2748 return expand_expr (build_call_nary (ctype, call, 1, narg),
2749 target, VOIDmode, EXPAND_NORMAL);
2752 /* Now build the proper return type. */
2753 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2754 make_tree (TREE_TYPE (arg), op2),
2755 make_tree (TREE_TYPE (arg), op1)),
2756 target, VOIDmode, EXPAND_NORMAL);
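/* In other words, __builtin_cexpi (x) computes cos (x) + i*sin (x),
   i.e. cexp (i*x) by Euler's formula; depending on what is available it
   is expanded via the sincos optab, a call to sincos, or a call to cexp
   on the complex argument 0.0 + x*i built above.  */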
2759 /* Conveniently construct a function call expression. FNDECL names the
2760 function to be called, N is the number of arguments, and the "..."
2761 parameters are the argument expressions. Unlike build_call_expr
2762 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2764 static tree
2765 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2767 va_list ap;
2768 tree fntype = TREE_TYPE (fndecl);
2769 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2771 va_start (ap, n);
2772 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2773 va_end (ap);
2774 SET_EXPR_LOCATION (fn, loc);
2775 return fn;
2778 /* Expand the __builtin_issignaling builtin. This needs to handle
2779 all floating point formats that do support NaNs (for those that
2780 don't it just sets target to 0). */
2782 static rtx
2783 expand_builtin_issignaling (tree exp, rtx target)
2785 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2786 return NULL_RTX;
2788 tree arg = CALL_EXPR_ARG (exp, 0);
2789 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2790 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2792 /* Expand the argument yielding a RTX expression. */
2793 rtx temp = expand_normal (arg);
2795 /* If mode doesn't support NaN, always return 0.
2796 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2797 __builtin_issignaling working without -fsignaling-nans. Especially
2798 when -fno-signaling-nans is the default.
2799 On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
2800 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2801 fold to 0 or non-NaN/Inf classification. */
2802 if (!HONOR_NANS (fmode))
2804 emit_move_insn (target, const0_rtx);
2805 return target;
2808 /* Check if the back end provides an insn that handles issignaling for the
2809 argument's mode. */
2810 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2811 if (icode != CODE_FOR_nothing)
2813 rtx_insn *last = get_last_insn ();
2814 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2815 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2816 return this_target;
2817 delete_insns_since (last);
2820 if (DECIMAL_FLOAT_MODE_P (fmode))
2822 scalar_int_mode imode;
2823 rtx hi;
2824 switch (fmt->ieee_bits)
2826 case 32:
2827 case 64:
2828 imode = int_mode_for_mode (fmode).require ();
2829 temp = gen_lowpart (imode, temp);
2830 break;
2831 case 128:
2832 imode = int_mode_for_size (64, 1).require ();
2833 hi = NULL_RTX;
2834 /* For decimal128, TImode support isn't always there and even when
2835 it is, working on the DImode high part is usually better. */
2836 if (!MEM_P (temp))
2838 if (rtx t = simplify_gen_subreg (imode, temp, fmode,
2839 subreg_highpart_offset (imode,
2840 fmode)))
2841 hi = t;
2842 else
2844 scalar_int_mode imode2;
2845 if (int_mode_for_mode (fmode).exists (&imode2))
2847 rtx temp2 = gen_lowpart (imode2, temp);
2848 poly_uint64 off = subreg_highpart_offset (imode, imode2);
2849 if (rtx t = simplify_gen_subreg (imode, temp2,
2850 imode2, off))
2851 hi = t;
2854 if (!hi)
2856 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2857 emit_move_insn (mem, temp);
2858 temp = mem;
2861 if (!hi)
2863 poly_int64 offset
2864 = subreg_highpart_offset (imode, GET_MODE (temp));
2865 hi = adjust_address (temp, imode, offset);
2867 temp = hi;
2868 break;
2869 default:
2870 gcc_unreachable ();
2872 /* In all of decimal{32,64,128}, the MSB is the sign bit and an sNaN
2873 has the 6 bits below it all set. */
2874 rtx val
2875 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2876 temp = expand_binop (imode, and_optab, temp, val,
2877 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2878 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2879 return temp;
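/* Concretely, for decimal32 the constant VAL above is
   0x3f << (32 - 7) == 0x7e000000, so (TEMP & VAL) == VAL accepts
   exactly the encodings whose six bits below the sign are all set,
   which is the signaling-NaN pattern.  */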
2882 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2883 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2884 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2885 gcc_assert (MODE_COMPOSITE_P (fmode)
2886 || (fmt->pnan == fmt->p
2887 && fmt->signbit_ro == fmt->signbit_rw));
2889 switch (fmt->p)
2891 case 106: /* IBM double double */
2892 /* For IBM double double, recurse on the most significant double. */
2893 gcc_assert (MODE_COMPOSITE_P (fmode));
2894 temp = convert_modes (DFmode, fmode, temp, 0);
2895 fmode = DFmode;
2896 fmt = REAL_MODE_FORMAT (DFmode);
2897 /* FALLTHRU */
2898 case 8: /* bfloat */
2899 case 11: /* IEEE half */
2900 case 24: /* IEEE single */
2901 case 53: /* IEEE double or Intel extended with rounding to double */
2902 if (fmt->p == 53 && fmt->signbit_ro == 79)
2903 goto extended;
2905 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2906 temp = gen_lowpart (imode, temp);
2907 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2908 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2909 if (fmt->qnan_msb_set)
2911 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2912 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2913 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2914 ((temp ^ bit) & mask) > val. */
2915 temp = expand_binop (imode, xor_optab, temp, bit,
2916 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2917 temp = expand_binop (imode, and_optab, temp, mask,
2918 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2919 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2920 1, 1);
2922 else
2924 /* For MIPS/PA IEEE single/double, expand to:
2925 (temp & val) == val. */
2926 temp = expand_binop (imode, and_optab, temp, val,
2927 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2928 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2929 1, 1);
2932 break;
2933 case 113: /* IEEE quad */
2935 rtx hi = NULL_RTX, lo = NULL_RTX;
2936 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2937 /* For IEEE quad, TImode support isn't always there and even when
2938 it is, working on DImode parts is usually better. */
2939 if (!MEM_P (temp))
2941 hi = simplify_gen_subreg (imode, temp, fmode,
2942 subreg_highpart_offset (imode, fmode));
2943 lo = simplify_gen_subreg (imode, temp, fmode,
2944 subreg_lowpart_offset (imode, fmode));
2945 if (!hi || !lo)
2947 scalar_int_mode imode2;
2948 if (int_mode_for_mode (fmode).exists (&imode2))
2950 rtx temp2 = gen_lowpart (imode2, temp);
2951 hi = simplify_gen_subreg (imode, temp2, imode2,
2952 subreg_highpart_offset (imode,
2953 imode2));
2954 lo = simplify_gen_subreg (imode, temp2, imode2,
2955 subreg_lowpart_offset (imode,
2956 imode2));
2959 if (!hi || !lo)
2961 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2962 emit_move_insn (mem, temp);
2963 temp = mem;
2966 if (!hi || !lo)
2968 poly_int64 offset
2969 = subreg_highpart_offset (imode, GET_MODE (temp));
2970 hi = adjust_address (temp, imode, offset);
2971 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2972 lo = adjust_address (temp, imode, offset);
2974 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2975 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2976 if (fmt->qnan_msb_set)
2978 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2979 - 64)));
2980 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2981 /* For non-MIPS/PA IEEE quad, expand to:
2982 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2983 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2984 lo = expand_binop (imode, ior_optab, lo, nlo,
2985 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2986 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2987 temp = expand_binop (imode, xor_optab, hi, bit,
2988 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2989 temp = expand_binop (imode, ior_optab, temp, lo,
2990 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2991 temp = expand_binop (imode, and_optab, temp, mask,
2992 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2993 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2994 1, 1);
2996 else
2998 /* For MIPS/PA IEEE quad, expand to:
2999 (hi & val) == val. */
3000 temp = expand_binop (imode, and_optab, hi, val,
3001 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3002 temp = emit_store_flag_force (target, EQ, temp, val, imode,
3003 1, 1);
3006 break;
3007 case 64: /* Intel or Motorola extended */
3008 extended:
3010 rtx ex, hi, lo;
3011 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3012 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3013 if (!MEM_P (temp))
3015 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3016 emit_move_insn (mem, temp);
3017 temp = mem;
3019 if (fmt->signbit_ro == 95)
3021 /* Motorola, always big endian, with 16-bit gap in between
3022 16-bit sign+exponent and 64-bit mantissa. */
3023 ex = adjust_address (temp, iemode, 0);
3024 hi = adjust_address (temp, imode, 4);
3025 lo = adjust_address (temp, imode, 8);
3027 else if (!WORDS_BIG_ENDIAN)
3029 /* Intel little endian, 64-bit mantissa followed by 16-bit
3030 sign+exponent and then either 16 or 48 bits of gap. */
3031 ex = adjust_address (temp, iemode, 8);
3032 hi = adjust_address (temp, imode, 4);
3033 lo = adjust_address (temp, imode, 0);
3035 else
3037 /* Big endian Itanium. */
3038 ex = adjust_address (temp, iemode, 0);
3039 hi = adjust_address (temp, imode, 2);
3040 lo = adjust_address (temp, imode, 6);
3042 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3043 gcc_assert (fmt->qnan_msb_set);
3044 rtx mask = GEN_INT (0x7fff);
3045 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3046 /* For Intel/Motorola extended format, expand to:
3047 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3048 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3049 lo = expand_binop (imode, ior_optab, lo, nlo,
3050 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3051 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3052 temp = expand_binop (imode, xor_optab, hi, bit,
3053 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3054 temp = expand_binop (imode, ior_optab, temp, lo,
3055 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3056 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3057 ex = expand_binop (iemode, and_optab, ex, mask,
3058 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3059 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3060 ex, mask, iemode, 1, 1);
3061 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3062 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3064 break;
3065 default:
3066 gcc_unreachable ();
3069 return temp;
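/* As a worked example for IEEE single (fmt->p == 24, signbit_ro == 31)
   on a target where the quiet bit is set for qNaNs, the code above uses
   VAL == 0x7fc00000, BIT == 0x00400000 and MASK == 0x7fffffff, so the
   test is ((x ^ 0x00400000) & 0x7fffffff) > 0x7fc00000: an sNaN such as
   0x7f800001 passes, while qNaNs, infinities and finite values do
   not.  */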
3072 /* Expand a call to one of the builtin rounding functions gcc defines
3073 as an extension (lfloor and lceil). As these are gcc extensions we
3074 do not need to worry about setting errno to EDOM.
3075 If expanding via optab fails, lower the expression to (int)(floor(x)).
3076 EXP is the expression that is a call to the builtin function;
3077 if convenient, the result should be placed in TARGET. */
3079 static rtx
3080 expand_builtin_int_roundingfn (tree exp, rtx target)
3082 convert_optab builtin_optab;
3083 rtx op0, tmp;
3084 rtx_insn *insns;
3085 tree fndecl = get_callee_fndecl (exp);
3086 enum built_in_function fallback_fn;
3087 tree fallback_fndecl;
3088 machine_mode mode;
3089 tree arg;
3091 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3092 return NULL_RTX;
3094 arg = CALL_EXPR_ARG (exp, 0);
3096 switch (DECL_FUNCTION_CODE (fndecl))
3098 CASE_FLT_FN (BUILT_IN_ICEIL):
3099 CASE_FLT_FN (BUILT_IN_LCEIL):
3100 CASE_FLT_FN (BUILT_IN_LLCEIL):
3101 builtin_optab = lceil_optab;
3102 fallback_fn = BUILT_IN_CEIL;
3103 break;
3105 CASE_FLT_FN (BUILT_IN_IFLOOR):
3106 CASE_FLT_FN (BUILT_IN_LFLOOR):
3107 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3108 builtin_optab = lfloor_optab;
3109 fallback_fn = BUILT_IN_FLOOR;
3110 break;
3112 default:
3113 gcc_unreachable ();
3116 /* Make a suitable register to place result in. */
3117 mode = TYPE_MODE (TREE_TYPE (exp));
3119 target = gen_reg_rtx (mode);
3121 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3122 need to expand the argument again. This way, we will not perform
3123 side-effects more than once. */
3124 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3126 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3128 start_sequence ();
3130 /* Compute into TARGET. */
3131 if (expand_sfix_optab (target, op0, builtin_optab))
3133 /* Output the entire sequence. */
3134 insns = get_insns ();
3135 end_sequence ();
3136 emit_insn (insns);
3137 return target;
3140 /* If we were unable to expand via the builtin, stop the sequence
3141 (without outputting the insns). */
3142 end_sequence ();
3144 /* Fall back to floating point rounding optab. */
3145 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3147 /* For non-C99 targets we may end up without a fallback fndecl here
3148 if the user called __builtin_lfloor directly. In this case emit
3149 a call to the floor/ceil variants nevertheless. This should result
3150 in the best user experience for targets without full C99 support. */
3151 if (fallback_fndecl == NULL_TREE)
3153 tree fntype;
3154 const char *name = NULL;
3156 switch (DECL_FUNCTION_CODE (fndecl))
3158 case BUILT_IN_ICEIL:
3159 case BUILT_IN_LCEIL:
3160 case BUILT_IN_LLCEIL:
3161 name = "ceil";
3162 break;
3163 case BUILT_IN_ICEILF:
3164 case BUILT_IN_LCEILF:
3165 case BUILT_IN_LLCEILF:
3166 name = "ceilf";
3167 break;
3168 case BUILT_IN_ICEILL:
3169 case BUILT_IN_LCEILL:
3170 case BUILT_IN_LLCEILL:
3171 name = "ceill";
3172 break;
3173 case BUILT_IN_IFLOOR:
3174 case BUILT_IN_LFLOOR:
3175 case BUILT_IN_LLFLOOR:
3176 name = "floor";
3177 break;
3178 case BUILT_IN_IFLOORF:
3179 case BUILT_IN_LFLOORF:
3180 case BUILT_IN_LLFLOORF:
3181 name = "floorf";
3182 break;
3183 case BUILT_IN_IFLOORL:
3184 case BUILT_IN_LFLOORL:
3185 case BUILT_IN_LLFLOORL:
3186 name = "floorl";
3187 break;
3188 default:
3189 gcc_unreachable ();
3192 fntype = build_function_type_list (TREE_TYPE (arg),
3193 TREE_TYPE (arg), NULL_TREE);
3194 fallback_fndecl = build_fn_decl (name, fntype);
3197 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3199 tmp = expand_normal (exp);
3200 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3202 /* Truncate the result of floating point optab to integer
3203 via expand_fix (). */
3204 target = gen_reg_rtx (mode);
3205 expand_fix (target, tmp, 0);
3207 return target;
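/* So for instance __builtin_lfloor (x) on a double X becomes a single
   lfloor optab instruction when the target provides one; failing that,
   the code above rebuilds the call as floor (x) and truncates the
   result with expand_fix, i.e. effectively (long) floor (x).  */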
3210 /* Expand a call to one of the builtin math functions doing integer
3211 conversion (lrint).
3212 Return 0 if a normal call should be emitted rather than expanding the
3213 function in-line. EXP is the expression that is a call to the builtin
3214 function; if convenient, the result should be placed in TARGET. */
3216 static rtx
3217 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3219 convert_optab builtin_optab;
3220 rtx op0;
3221 rtx_insn *insns;
3222 tree fndecl = get_callee_fndecl (exp);
3223 tree arg;
3224 machine_mode mode;
3225 enum built_in_function fallback_fn = BUILT_IN_NONE;
3227 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3228 return NULL_RTX;
3230 arg = CALL_EXPR_ARG (exp, 0);
3232 switch (DECL_FUNCTION_CODE (fndecl))
3234 CASE_FLT_FN (BUILT_IN_IRINT):
3235 fallback_fn = BUILT_IN_LRINT;
3236 gcc_fallthrough ();
3237 CASE_FLT_FN (BUILT_IN_LRINT):
3238 CASE_FLT_FN (BUILT_IN_LLRINT):
3239 builtin_optab = lrint_optab;
3240 break;
3242 CASE_FLT_FN (BUILT_IN_IROUND):
3243 fallback_fn = BUILT_IN_LROUND;
3244 gcc_fallthrough ();
3245 CASE_FLT_FN (BUILT_IN_LROUND):
3246 CASE_FLT_FN (BUILT_IN_LLROUND):
3247 builtin_optab = lround_optab;
3248 break;
3250 default:
3251 gcc_unreachable ();
3254 /* There's no easy way to detect the case we need to set EDOM. */
3255 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3256 return NULL_RTX;
3258 /* Make a suitable register to place result in. */
3259 mode = TYPE_MODE (TREE_TYPE (exp));
3261 /* There's no easy way to detect the case we need to set EDOM. */
3262 if (!flag_errno_math)
3264 rtx result = gen_reg_rtx (mode);
3266 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3267 need to expand the argument again. This way, we will not perform
3268 side-effects more than once. */
3269 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3271 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3273 start_sequence ();
3275 if (expand_sfix_optab (result, op0, builtin_optab))
3277 /* Output the entire sequence. */
3278 insns = get_insns ();
3279 end_sequence ();
3280 emit_insn (insns);
3281 return result;
3284 /* If we were unable to expand via the builtin, stop the sequence
3285 (without outputting the insns) and call to the library function
3286 with the stabilized argument list. */
3287 end_sequence ();
3290 if (fallback_fn != BUILT_IN_NONE)
3292 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3293 targets, (int) round (x) should never be transformed into
3294 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3295 a call to lround in the hope that the target provides at least some
3296 C99 functions. This should result in the best user experience for
3297 targets without full C99 support.
3298 As scalar float conversions with the same mode are useless in GIMPLE,
3299 we can end up e.g. with a _Float32 argument passed to a float builtin,
3300 try to get the type from the builtin prototype first. */
3301 tree fallback_fndecl = NULL_TREE;
3302 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3303 fallback_fndecl
3304 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3305 as_combined_fn (fallback_fn), 0);
3306 if (fallback_fndecl == NULL_TREE)
3307 fallback_fndecl
3308 = mathfn_built_in_1 (TREE_TYPE (arg),
3309 as_combined_fn (fallback_fn), 0);
3310 if (fallback_fndecl)
3312 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3313 fallback_fndecl, 1, arg);
3315 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3316 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3317 return convert_to_mode (mode, target, 0);
3321 return expand_call (exp, target, target == const0_rtx);
3324 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3325 a normal call should be emitted rather than expanding the function
3326 in-line. EXP is the expression that is a call to the builtin
3327 function; if convenient, the result should be placed in TARGET. */
3329 static rtx
3330 expand_builtin_powi (tree exp, rtx target)
3332 tree arg0, arg1;
3333 rtx op0, op1;
3334 machine_mode mode;
3335 machine_mode mode2;
3337 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3338 return NULL_RTX;
3340 arg0 = CALL_EXPR_ARG (exp, 0);
3341 arg1 = CALL_EXPR_ARG (exp, 1);
3342 mode = TYPE_MODE (TREE_TYPE (exp));
3344 /* Emit a libcall to libgcc. */
3346 /* Mode of the 2nd argument must match that of an int. */
3347 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3349 if (target == NULL_RTX)
3350 target = gen_reg_rtx (mode);
3352 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3353 if (GET_MODE (op0) != mode)
3354 op0 = convert_to_mode (mode, op0, 0);
3355 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3356 if (GET_MODE (op1) != mode2)
3357 op1 = convert_to_mode (mode2, op1, 0);
3359 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3360 target, LCT_CONST, mode,
3361 op0, mode, op1, mode2);
3363 return target;
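/* E.g. __builtin_powi (x, n) with a double X is emitted as a libgcc
   libcall of the form __powidf2 (x, n), with N first converted to the
   int-sized mode; there is no inline expansion path here.  */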
3366 /* Expand expression EXP which is a call to the strlen builtin. Return
3367 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3368 try to get the result in TARGET, if convenient. */
3370 static rtx
3371 expand_builtin_strlen (tree exp, rtx target,
3372 machine_mode target_mode)
3374 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3375 return NULL_RTX;
3377 tree src = CALL_EXPR_ARG (exp, 0);
3379 /* If the length can be computed at compile-time, return it. */
3380 if (tree len = c_strlen (src, 0))
3381 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3383 /* If the length can be computed at compile-time and is a constant
3384 integer, but there are side-effects in src, evaluate
3385 src for side-effects, then return len.
3386 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3387 can be optimized into: i++; x = 3; */
3388 tree len = c_strlen (src, 1);
3389 if (len && TREE_CODE (len) == INTEGER_CST)
3391 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3392 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3395 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3397 /* If SRC is not a pointer type, don't do this operation inline. */
3398 if (align == 0)
3399 return NULL_RTX;
3401 /* Bail out if we can't compute strlen in the right mode. */
3402 machine_mode insn_mode;
3403 enum insn_code icode = CODE_FOR_nothing;
3404 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3406 icode = optab_handler (strlen_optab, insn_mode);
3407 if (icode != CODE_FOR_nothing)
3408 break;
3410 if (insn_mode == VOIDmode)
3411 return NULL_RTX;
3413 /* Make a place to hold the source address. We will not expand
3414 the actual source until we are sure that the expansion will
3415 not fail -- there are trees that cannot be expanded twice. */
3416 rtx src_reg = gen_reg_rtx (Pmode);
3418 /* Mark the beginning of the strlen sequence so we can emit the
3419 source operand later. */
3420 rtx_insn *before_strlen = get_last_insn ();
3422 class expand_operand ops[4];
3423 create_output_operand (&ops[0], target, insn_mode);
3424 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3425 create_integer_operand (&ops[2], 0);
3426 create_integer_operand (&ops[3], align);
3427 if (!maybe_expand_insn (icode, 4, ops))
3428 return NULL_RTX;
3430 /* Check to see if the argument was declared attribute nonstring
3431 and if so, issue a warning since at this point it's not known
3432 to be nul-terminated. */
3433 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3435 /* Now that we are assured of success, expand the source. */
3436 start_sequence ();
3437 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3438 if (pat != src_reg)
3440 #ifdef POINTERS_EXTEND_UNSIGNED
3441 if (GET_MODE (pat) != Pmode)
3442 pat = convert_to_mode (Pmode, pat,
3443 POINTERS_EXTEND_UNSIGNED);
3444 #endif
3445 emit_move_insn (src_reg, pat);
3447 pat = get_insns ();
3448 end_sequence ();
3450 if (before_strlen)
3451 emit_insn_after (pat, before_strlen);
3452 else
3453 emit_insn_before (pat, get_insns ());
3455 /* Return the value in the proper mode for this function. */
3456 if (GET_MODE (ops[0].value) == target_mode)
3457 target = ops[0].value;
3458 else if (target != 0)
3459 convert_move (target, ops[0].value, 0);
3460 else
3461 target = convert_to_mode (target_mode, ops[0].value, 0);
3463 return target;
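/* Illustrative sketch of the two main paths above (assuming no earlier
   GIMPLE folding has already removed the call):

     n = strlen ("hello");    expands to   n = 5;        (c_strlen path)
     n = strlen (p);          uses the target's strlenM pattern if one is
                              available, otherwise falls back to a normal
                              library call.  */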
3466 /* Expand call EXP to the strnlen built-in, returning the result
3467 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3469 static rtx
3470 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3472 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3473 return NULL_RTX;
3475 tree src = CALL_EXPR_ARG (exp, 0);
3476 tree bound = CALL_EXPR_ARG (exp, 1);
3478 if (!bound)
3479 return NULL_RTX;
3481 location_t loc = UNKNOWN_LOCATION;
3482 if (EXPR_HAS_LOCATION (exp))
3483 loc = EXPR_LOCATION (exp);
3485 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3486 so these conversions aren't necessary. */
3487 c_strlen_data lendata = { };
3488 tree len = c_strlen (src, 0, &lendata, 1);
3489 if (len)
3490 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3492 if (TREE_CODE (bound) == INTEGER_CST)
3494 if (!len)
3495 return NULL_RTX;
3497 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3498 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3501 if (TREE_CODE (bound) != SSA_NAME)
3502 return NULL_RTX;
3504 wide_int min, max;
3505 value_range r;
3506 get_global_range_query ()->range_of_expr (r, bound);
3507 if (r.varying_p () || r.undefined_p ())
3508 return NULL_RTX;
3509 min = r.lower_bound ();
3510 max = r.upper_bound ();
3512 if (!len || TREE_CODE (len) != INTEGER_CST)
3514 bool exact;
3515 lendata.decl = unterminated_array (src, &len, &exact);
3516 if (!lendata.decl)
3517 return NULL_RTX;
3520 if (lendata.decl)
3521 return NULL_RTX;
3523 if (wi::gtu_p (min, wi::to_wide (len)))
3524 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3526 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3527 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
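/* Worked example for the cases above (a sketch; the exact numbers depend
   on what the global range query knows about N):

     const char s[] = "hello";
     ... strnlen (s, 3) ...      constant bound: MIN (5, 3) = 3
     ... strnlen (s, n) ...      if N's recorded range is [8, 64], its
                                 minimum exceeds 5, so this expands to 5.  */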
3530 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3531 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3532 a target constant. */
3534 static rtx
3535 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3536 fixed_size_mode mode)
3538 /* The REPresentation pointed to by DATA need not be a nul-terminated
3539 string but the caller guarantees it's large enough for MODE. */
3540 const char *rep = (const char *) data;
3542 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3545 /* LEN specifies the length of the block for the memcpy/memset operation.
3546 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3547 In some cases we can make a very likely guess at the maximum size, which
3548 we then record in PROBABLE_MAX_SIZE. */
3550 static void
3551 determine_block_size (tree len, rtx len_rtx,
3552 unsigned HOST_WIDE_INT *min_size,
3553 unsigned HOST_WIDE_INT *max_size,
3554 unsigned HOST_WIDE_INT *probable_max_size)
3556 if (CONST_INT_P (len_rtx))
3558 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3559 return;
3561 else
3563 wide_int min, max;
3564 enum value_range_kind range_type = VR_UNDEFINED;
3566 /* Determine bounds from the type. */
3567 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3568 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3569 else
3570 *min_size = 0;
3571 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3572 *probable_max_size = *max_size
3573 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3574 else
3575 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3577 if (TREE_CODE (len) == SSA_NAME)
3579 value_range r;
3580 tree tmin, tmax;
3581 get_global_range_query ()->range_of_expr (r, len);
3582 range_type = get_legacy_range (r, tmin, tmax);
3583 if (range_type != VR_UNDEFINED)
3585 min = wi::to_wide (tmin);
3586 max = wi::to_wide (tmax);
3589 if (range_type == VR_RANGE)
3591 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3592 *min_size = min.to_uhwi ();
3593 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3594 *probable_max_size = *max_size = max.to_uhwi ();
3596 else if (range_type == VR_ANTI_RANGE)
3598 /* Code like
3600 int n;
3601 if (n < 100)
3602 memcpy (a, b, n)
3604 Produce anti range allowing negative values of N. We still
3605 can use the information and make a guess that N is not negative. */
3607 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3608 *probable_max_size = min.to_uhwi () - 1;
3611 gcc_checking_assert (*max_size <=
3612 (unsigned HOST_WIDE_INT)
3613 GET_MODE_MASK (GET_MODE (len_rtx)));
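/* Worked example (a sketch): if the length is an SSA name for which the
   global range query has recorded the range [16, 128], the code above sets
   *MIN_SIZE = 16 and *MAX_SIZE = *PROBABLE_MAX_SIZE = 128.  With a constant
   length all three collapse to that constant.  */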
3616 /* Expand a call EXP to the memcpy builtin.
3617 Return NULL_RTX if we failed, the caller should emit a normal call,
3618 otherwise try to get the result in TARGET, if convenient (and in
3619 mode MODE if that's convenient). */
3621 static rtx
3622 expand_builtin_memcpy (tree exp, rtx target)
3624 if (!validate_arglist (exp,
3625 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3626 return NULL_RTX;
3628 tree dest = CALL_EXPR_ARG (exp, 0);
3629 tree src = CALL_EXPR_ARG (exp, 1);
3630 tree len = CALL_EXPR_ARG (exp, 2);
3632 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3633 /*retmode=*/ RETURN_BEGIN, false);
3636 /* Expand a call EXP to the memmove built-in. Return NULL_RTX if we failed;
3637 the caller should emit a normal call, otherwise try to get the result in TARGET. */
3639 static rtx
3640 expand_builtin_memmove (tree exp, rtx target)
3642 if (!validate_arglist (exp,
3643 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3644 return NULL_RTX;
3646 tree dest = CALL_EXPR_ARG (exp, 0);
3647 tree src = CALL_EXPR_ARG (exp, 1);
3648 tree len = CALL_EXPR_ARG (exp, 2);
3650 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3651 /*retmode=*/ RETURN_BEGIN, true);
3654 /* Expand a call EXP to the mempcpy builtin.
3655 Return NULL_RTX if we failed; the caller should emit a normal call,
3656 otherwise try to get the result in TARGET, if convenient (and in
3657 mode MODE if that's convenient). */
3659 static rtx
3660 expand_builtin_mempcpy (tree exp, rtx target)
3662 if (!validate_arglist (exp,
3663 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3664 return NULL_RTX;
3666 tree dest = CALL_EXPR_ARG (exp, 0);
3667 tree src = CALL_EXPR_ARG (exp, 1);
3668 tree len = CALL_EXPR_ARG (exp, 2);
3670 /* Policy does not generally allow using compute_objsize (which
3671 is used internally by check_memop_size) to change code generation
3672 or drive optimization decisions.
3674 In this instance it is safe because the code we generate has
3675 the same semantics regardless of the return value of
3676 check_memop_sizes. Exactly the same amount of data is copied
3677 and the return value is exactly the same in both cases.
3679 Furthermore, check_memop_size always uses mode 0 for the call to
3680 compute_objsize, so the imprecise nature of compute_objsize is
3681 avoided. */
3683 /* Avoid expanding mempcpy into memcpy when the call is determined
3684 to overflow the buffer. This also prevents the same overflow
3685 from being diagnosed again when expanding memcpy. */
3687 return expand_builtin_mempcpy_args (dest, src, len,
3688 target, exp, /*retmode=*/ RETURN_END);
3691 /* Helper function to do the actual work for expand of memory copy family
3692 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
3693 of memory from SRC to DEST and assign the result to TARGET if convenient. Return
3694 value is based on RETMODE argument. */
3696 static rtx
3697 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3698 rtx target, tree exp, memop_ret retmode,
3699 bool might_overlap)
3701 unsigned int src_align = get_pointer_alignment (src);
3702 unsigned int dest_align = get_pointer_alignment (dest);
3703 rtx dest_mem, src_mem, dest_addr, len_rtx;
3704 HOST_WIDE_INT expected_size = -1;
3705 unsigned int expected_align = 0;
3706 unsigned HOST_WIDE_INT min_size;
3707 unsigned HOST_WIDE_INT max_size;
3708 unsigned HOST_WIDE_INT probable_max_size;
3710 bool is_move_done;
3712 /* If DEST is not a pointer type, call the normal function. */
3713 if (dest_align == 0)
3714 return NULL_RTX;
3716 /* If SRC is not a pointer type, don't do this
3717 operation in-line. */
3718 if (src_align == 0)
3719 return NULL_RTX;
3721 if (currently_expanding_gimple_stmt)
3722 stringop_block_profile (currently_expanding_gimple_stmt,
3723 &expected_align, &expected_size);
3725 if (expected_align < dest_align)
3726 expected_align = dest_align;
3727 dest_mem = get_memory_rtx (dest, len);
3728 set_mem_align (dest_mem, dest_align);
3729 len_rtx = expand_normal (len);
3730 determine_block_size (len, len_rtx, &min_size, &max_size,
3731 &probable_max_size);
3733 /* Try to get the byte representation of the constant SRC points to,
3734 with its byte size in NBYTES. */
3735 unsigned HOST_WIDE_INT nbytes;
3736 const char *rep = getbyterep (src, &nbytes);
3738 /* If the function's constant bound LEN_RTX is less than or equal
3739 to the byte size of the representation of the constant argument,
3740 and if block move would be done by pieces, we can avoid loading
3741 the bytes from memory and only store the computed constant.
3742 This works in the overlap (memmove) case as well because
3743 store_by_pieces just generates a series of stores of constants
3744 from the representation returned by getbyterep(). */
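/* E.g. (a sketch, assuming a little-endian target where a 4-byte store can
   be done by pieces):

     memcpy (buf, "abcdef", 4);

   stores the constant 0x64636261 directly into BUF instead of loading four
   bytes from the string literal.  */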
3745 if (rep
3746 && CONST_INT_P (len_rtx)
3747 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3748 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3749 CONST_CAST (char *, rep),
3750 dest_align, false))
3752 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3753 builtin_memcpy_read_str,
3754 CONST_CAST (char *, rep),
3755 dest_align, false, retmode);
3756 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3757 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3758 return dest_mem;
3761 src_mem = get_memory_rtx (src, len);
3762 set_mem_align (src_mem, src_align);
3764 /* Copy word part most expediently. */
3765 enum block_op_methods method = BLOCK_OP_NORMAL;
3766 if (CALL_EXPR_TAILCALL (exp)
3767 && (retmode == RETURN_BEGIN || target == const0_rtx))
3768 method = BLOCK_OP_TAILCALL;
3769 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3770 && retmode == RETURN_END
3771 && !might_overlap
3772 && target != const0_rtx);
3773 if (use_mempcpy_call)
3774 method = BLOCK_OP_NO_LIBCALL_RET;
3775 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3776 expected_align, expected_size,
3777 min_size, max_size, probable_max_size,
3778 use_mempcpy_call, &is_move_done,
3779 might_overlap, tree_ctz (len));
3781 /* Bail out when a mempcpy call would be expanded as a libcall and the
3782 target provides a fast implementation
3783 of the mempcpy routine. */
3784 if (!is_move_done)
3785 return NULL_RTX;
3787 if (dest_addr == pc_rtx)
3788 return NULL_RTX;
3790 if (dest_addr == 0)
3792 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3793 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3796 if (retmode != RETURN_BEGIN && target != const0_rtx)
3798 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3799 /* stpcpy pointer to last byte. */
3800 if (retmode == RETURN_END_MINUS_ONE)
3801 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3804 return dest_addr;
3807 static rtx
3808 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3809 rtx target, tree orig_exp, memop_ret retmode)
3811 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3812 retmode, false);
3815 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3816 we failed, the caller should emit a normal call, otherwise try to
3817 get the result in TARGET, if convenient.
3818 Return value is based on RETMODE argument. */
3820 static rtx
3821 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3823 class expand_operand ops[3];
3824 rtx dest_mem;
3825 rtx src_mem;
3827 if (!targetm.have_movstr ())
3828 return NULL_RTX;
3830 dest_mem = get_memory_rtx (dest, NULL);
3831 src_mem = get_memory_rtx (src, NULL);
3832 if (retmode == RETURN_BEGIN)
3834 target = force_reg (Pmode, XEXP (dest_mem, 0));
3835 dest_mem = replace_equiv_address (dest_mem, target);
3838 create_output_operand (&ops[0],
3839 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3840 create_fixed_operand (&ops[1], dest_mem);
3841 create_fixed_operand (&ops[2], src_mem);
3842 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3843 return NULL_RTX;
3845 if (retmode != RETURN_BEGIN && target != const0_rtx)
3847 target = ops[0].value;
3848 /* movstr is supposed to set end to the address of the NUL
3849 terminator. If the caller requested a mempcpy-like return value,
3850 adjust it. */
3851 if (retmode == RETURN_END)
3853 rtx tem = plus_constant (GET_MODE (target),
3854 gen_lowpart (GET_MODE (target), target), 1);
3855 emit_move_insn (target, force_operand (tem, NULL_RTX));
3858 return target;
3861 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3862 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3863 try to get the result in TARGET, if convenient (and in mode MODE if that's
3864 convenient). */
3866 static rtx
3867 expand_builtin_strcpy (tree exp, rtx target)
3869 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3870 return NULL_RTX;
3872 tree dest = CALL_EXPR_ARG (exp, 0);
3873 tree src = CALL_EXPR_ARG (exp, 1);
3875 return expand_builtin_strcpy_args (exp, dest, src, target);
3878 /* Helper function to do the actual work for expand_builtin_strcpy. The
3879 arguments to the builtin_strcpy call DEST and SRC are broken out
3880 so that this can also be called without constructing an actual CALL_EXPR.
3881 The other arguments and return value are the same as for
3882 expand_builtin_strcpy. */
3884 static rtx
3885 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3887 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3890 /* Expand a call EXP to the stpcpy builtin.
3891 Return NULL_RTX if we failed; the caller should emit a normal call,
3892 otherwise try to get the result in TARGET, if convenient (and in
3893 mode MODE if that's convenient). */
3895 static rtx
3896 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3898 tree dst, src;
3899 location_t loc = EXPR_LOCATION (exp);
3901 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3902 return NULL_RTX;
3904 dst = CALL_EXPR_ARG (exp, 0);
3905 src = CALL_EXPR_ARG (exp, 1);
3907 /* If return value is ignored, transform stpcpy into strcpy. */
3908 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3910 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3911 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3912 return expand_expr (result, target, mode, EXPAND_NORMAL);
3914 else
3916 tree len, lenp1;
3917 rtx ret;
3919 /* Ensure we get an actual string whose length can be evaluated at
3920 compile-time, not an expression containing a string. This is
3921 because the latter will potentially produce pessimized code
3922 when used to produce the return value. */
3923 c_strlen_data lendata = { };
3924 if (!c_getstr (src)
3925 || !(len = c_strlen (src, 0, &lendata, 1)))
3926 return expand_movstr (dst, src, target,
3927 /*retmode=*/ RETURN_END_MINUS_ONE);
3929 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3930 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3931 target, exp,
3932 /*retmode=*/ RETURN_END_MINUS_ONE);
3934 if (ret)
3935 return ret;
3937 if (TREE_CODE (len) == INTEGER_CST)
3939 rtx len_rtx = expand_normal (len);
3941 if (CONST_INT_P (len_rtx))
3943 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3945 if (ret)
3947 if (! target)
3949 if (mode != VOIDmode)
3950 target = gen_reg_rtx (mode);
3951 else
3952 target = gen_reg_rtx (GET_MODE (ret));
3954 if (GET_MODE (target) != GET_MODE (ret))
3955 ret = gen_lowpart (GET_MODE (target), ret);
3957 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3958 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3959 gcc_assert (ret);
3961 return target;
3966 return expand_movstr (dst, src, target,
3967 /*retmode=*/ RETURN_END_MINUS_ONE);
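/* Illustrative sketches for the two transformations above (assuming the
   usual builtin declarations are available):

     stpcpy (d, s);            result unused: expanded as strcpy (d, s).
     p = stpcpy (d, "abc");    constant source: copies 4 bytes and computes
                               the return value as d + 3, the address of
                               the copied NUL.  */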
3971 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3972 arguments while being careful to avoid duplicate warnings (which could
3973 be issued if the expander were to expand the call, resulting in it
3974 being emitted in expand_call ()). */
3976 static rtx
3977 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3979 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3981 /* The call has been successfully expanded. Check for nonstring
3982 arguments and issue warnings as appropriate. */
3983 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3984 return ret;
3987 return NULL_RTX;
3990 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3991 bytes from constant string DATA + OFFSET and return it as target
3992 constant. */
3995 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3996 fixed_size_mode mode)
3998 const char *str = (const char *) data;
4000 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4001 return const0_rtx;
4003 return c_readstr (str + offset, mode);
4006 /* Helper to check the sizes of sequences and the destination of calls
4007 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4008 success (no overflow or invalid sizes), false otherwise. */
4010 static bool
4011 check_strncat_sizes (tree exp, tree objsize)
4013 tree dest = CALL_EXPR_ARG (exp, 0);
4014 tree src = CALL_EXPR_ARG (exp, 1);
4015 tree maxread = CALL_EXPR_ARG (exp, 2);
4017 /* Try to determine the range of lengths that the source expression
4018 refers to. */
4019 c_strlen_data lendata = { };
4020 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4022 /* Try to verify that the destination is big enough for the shortest
4023 string. */
4025 access_data data (nullptr, exp, access_read_write, maxread, true);
4026 if (!objsize && warn_stringop_overflow)
4028 /* If it hasn't been provided by __strncat_chk, try to determine
4029 the size of the destination object into which the source is
4030 being copied. */
4031 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4034 /* Add one for the terminating nul. */
4035 tree srclen = (lendata.minlen
4036 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4037 size_one_node)
4038 : NULL_TREE);
4040 /* The strncat function copies at most MAXREAD bytes and always appends
4041 the terminating nul so the specified upper bound should never be equal
4042 to (or greater than) the size of the destination. */
4043 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4044 && tree_int_cst_equal (objsize, maxread))
4046 location_t loc = EXPR_LOCATION (exp);
4047 warning_at (loc, OPT_Wstringop_overflow_,
4048 "%qD specified bound %E equals destination size",
4049 get_callee_fndecl (exp), maxread);
4051 return false;
4054 if (!srclen
4055 || (maxread && tree_fits_uhwi_p (maxread)
4056 && tree_fits_uhwi_p (srclen)
4057 && tree_int_cst_lt (maxread, srclen)))
4058 srclen = maxread;
4060 /* The number of bytes to write is LEN but check_access will also
4061 check SRCLEN if LEN's value isn't known. */
4062 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4063 objsize, data.mode, &data);
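/* E.g. (a sketch) with -Wstringop-overflow enabled:

     char d[8];
     strncat (d, s, sizeof d);    the bound 8 equals the destination size,
                                  so the warning above triggers and the
                                  function returns false.  */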
4066 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4067 NULL_RTX if we failed; the caller should emit a normal call. */
4069 static rtx
4070 expand_builtin_strncpy (tree exp, rtx target)
4072 location_t loc = EXPR_LOCATION (exp);
4074 if (!validate_arglist (exp,
4075 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4076 return NULL_RTX;
4077 tree dest = CALL_EXPR_ARG (exp, 0);
4078 tree src = CALL_EXPR_ARG (exp, 1);
4079 /* The number of bytes to write (not the maximum). */
4080 tree len = CALL_EXPR_ARG (exp, 2);
4082 /* The length of the source sequence. */
4083 tree slen = c_strlen (src, 1);
4085 /* We must be passed a constant len and src parameter. */
4086 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4087 return NULL_RTX;
4089 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4091 /* We're required to pad with trailing zeros if the requested
4092 len is greater than strlen(s2)+1. In that case try to
4093 use store_by_pieces; if that fails, punt. */
4094 if (tree_int_cst_lt (slen, len))
4096 unsigned int dest_align = get_pointer_alignment (dest);
4097 const char *p = c_getstr (src);
4098 rtx dest_mem;
4100 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4101 || !can_store_by_pieces (tree_to_uhwi (len),
4102 builtin_strncpy_read_str,
4103 CONST_CAST (char *, p),
4104 dest_align, false))
4105 return NULL_RTX;
4107 dest_mem = get_memory_rtx (dest, len);
4108 store_by_pieces (dest_mem, tree_to_uhwi (len),
4109 builtin_strncpy_read_str,
4110 CONST_CAST (char *, p), dest_align, false,
4111 RETURN_BEGIN);
4112 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4113 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4114 return dest_mem;
4117 return NULL_RTX;
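/* Worked example for the padding case above (a sketch, assuming the target
   can store 6 bytes by pieces):

     strncpy (d, "ab", 6);

   strlen ("ab") + 1 == 3 < 6, so the expansion stores 'a', 'b' and then
   four NUL bytes; builtin_strncpy_read_str supplies zeros for offsets past
   the end of the source string.  */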
4120 /* Return the RTL of a register in MODE generated from PREV in the
4121 previous iteration. */
4123 static rtx
4124 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4126 rtx target = nullptr;
4127 if (prev != nullptr && prev->data != nullptr)
4129 /* Use the previous data in the same mode. */
4130 if (prev->mode == mode)
4131 return prev->data;
4133 fixed_size_mode prev_mode = prev->mode;
4135 /* Don't use the previous data to write QImode if it is in a
4136 vector mode. */
4137 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4138 return target;
4140 rtx prev_rtx = prev->data;
4142 if (REG_P (prev_rtx)
4143 && HARD_REGISTER_P (prev_rtx)
4144 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4146 /* This case occurs when PREV_MODE is a vector and when
4147 MODE is too small to store using vector operations.
4148 After register allocation, the code will need to move the
4149 lowpart of the vector register into a non-vector register.
4151 Also, the target has chosen to use a hard register
4152 instead of going with the default choice of using a
4153 pseudo register. We should respect that choice and try to
4154 avoid creating a pseudo register with the same mode as the
4155 current hard register.
4157 In principle, we could just use a lowpart MODE subreg of
4158 the vector register. However, the vector register mode might
4159 be too wide for non-vector registers, and we already know
4160 that the non-vector mode is too small for vector registers.
4161 It's therefore likely that we'd need to spill to memory in
4162 the vector mode and reload the non-vector value from there.
4164 Try to avoid that by reducing the vector register to the
4165 smallest size that it can hold. This should increase the
4166 chances that non-vector registers can hold both the inner
4167 and outer modes of the subreg that we generate later. */
4168 machine_mode m;
4169 fixed_size_mode candidate;
4170 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4171 if (is_a<fixed_size_mode> (m, &candidate))
4173 if (GET_MODE_SIZE (candidate)
4174 >= GET_MODE_SIZE (prev_mode))
4175 break;
4176 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4177 && lowpart_subreg_regno (REGNO (prev_rtx),
4178 prev_mode, candidate) >= 0)
4180 target = lowpart_subreg (candidate, prev_rtx,
4181 prev_mode);
4182 prev_rtx = target;
4183 prev_mode = candidate;
4184 break;
4187 if (target == nullptr)
4188 prev_rtx = copy_to_reg (prev_rtx);
4191 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4193 return target;
4196 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4197 bytes from constant string DATA + OFFSET and return it as target
4198 constant. If PREV isn't nullptr, it has the RTL info from the
4199 previous iteration. */
4202 builtin_memset_read_str (void *data, void *prev,
4203 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4204 fixed_size_mode mode)
4206 const char *c = (const char *) data;
4207 unsigned int size = GET_MODE_SIZE (mode);
4209 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4210 mode);
4211 if (target != nullptr)
4212 return target;
4213 rtx src = gen_int_mode (*c, QImode);
4215 if (VECTOR_MODE_P (mode))
4217 gcc_assert (GET_MODE_INNER (mode) == QImode);
4219 rtx const_vec = gen_const_vec_duplicate (mode, src);
4220 if (prev == NULL)
4221 /* Return CONST_VECTOR when called by a query function. */
4222 return const_vec;
4224 /* Use the move expander with CONST_VECTOR. */
4225 target = gen_reg_rtx (mode);
4226 emit_move_insn (target, const_vec);
4227 return target;
4230 char *p = XALLOCAVEC (char, size);
4232 memset (p, *c, size);
4234 return c_readstr (p, mode);
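/* E.g. (a sketch): for memset with the constant byte 0x41 and a 4-byte
   integer MODE, the buffer filled above is "AAAA" and c_readstr turns it
   into the constant 0x41414141; for a QImode-element vector mode a
   CONST_VECTOR of 0x41 elements is returned instead.  */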
4237 /* Callback routine for store_by_pieces. Return the RTL of a register
4238 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4239 char value given in the RTL register data. For example, if mode is
4240 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4241 nullptr, it has the RTL info from the previous iteration. */
4243 static rtx
4244 builtin_memset_gen_str (void *data, void *prev,
4245 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4246 fixed_size_mode mode)
4248 rtx target, coeff;
4249 size_t size;
4250 char *p;
4252 size = GET_MODE_SIZE (mode);
4253 if (size == 1)
4254 return (rtx) data;
4256 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4257 if (target != nullptr)
4258 return target;
4260 if (VECTOR_MODE_P (mode))
4262 gcc_assert (GET_MODE_INNER (mode) == QImode);
4264 /* vec_duplicate_optab is a precondition to pick a vector mode for
4265 the memset expander. */
4266 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4268 target = gen_reg_rtx (mode);
4269 class expand_operand ops[2];
4270 create_output_operand (&ops[0], target, mode);
4271 create_input_operand (&ops[1], (rtx) data, QImode);
4272 expand_insn (icode, 2, ops);
4273 if (!rtx_equal_p (target, ops[0].value))
4274 emit_move_insn (target, ops[0].value);
4276 return target;
4279 p = XALLOCAVEC (char, size);
4280 memset (p, 1, size);
4281 coeff = c_readstr (p, mode);
4283 target = convert_to_mode (mode, (rtx) data, 1);
4284 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4285 return force_reg (mode, target);
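/* The scalar branch above replicates a run-time byte by multiplication.
   E.g. (a sketch) for SImode and a register holding 0xab:

     coeff  = 0x01010101
     target = 0xab * 0x01010101 = 0xabababab

   i.e. every byte of the wider value equals the original byte.  */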
4288 /* Expand expression EXP, which is a call to the memset builtin. Return
4289 NULL_RTX if we failed the caller should emit a normal call, otherwise
4290 try to get the result in TARGET, if convenient (and in mode MODE if that's
4291 convenient). */
4294 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4296 if (!validate_arglist (exp,
4297 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4298 return NULL_RTX;
4300 tree dest = CALL_EXPR_ARG (exp, 0);
4301 tree val = CALL_EXPR_ARG (exp, 1);
4302 tree len = CALL_EXPR_ARG (exp, 2);
4304 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4307 /* Check that store_by_pieces allows BITS + LEN (so that we don't
4308 expand something too unreasonably long), and every power of 2 in
4309 BITS. It is assumed that LEN has already been tested by
4310 itself. */
4311 static bool
4312 can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4313 by_pieces_constfn constfun,
4314 void *constfundata, unsigned int align,
4315 bool memsetp,
4316 unsigned HOST_WIDE_INT len)
4318 if (bits
4319 && !can_store_by_pieces (bits + len, constfun, constfundata,
4320 align, memsetp))
4321 return false;
4323 /* BITS set are expected to be generally in the low range and
4324 contiguous. We do NOT want to repeat the test above in case BITS
4325 has a single bit set, so we terminate the loop when BITS == BIT.
4326 In the unlikely case that BITS has the MSB set, also terminate in
4327 case BIT gets shifted out. */
4328 for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4330 if ((bits & bit) == 0)
4331 continue;
4333 if (!can_store_by_pieces (bit, constfun, constfundata,
4334 align, memsetp))
4335 return false;
4338 return true;
4341 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4342 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4343 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4344 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4346 The strategy is to issue one store_by_pieces for each power of two,
4347 from most to least significant, guarded by a test on whether there
4348 are at least that many bytes left to copy in LEN.
4350 ??? Should we skip some powers of two in favor of loops? Maybe start
4351 at the max of TO/LEN/word alignment, at least when optimizing for
4352 size, instead of ensuring O(log len) dynamic compares? */
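/* Illustrative shape of the output (a sketch, for a length known to lie in
   [0, 15] with no useful alignment): the expansion below issues conditional
   stores of 8, 4, 2 and 1 bytes, each guarded by a comparison of the
   remaining length against that power of two, so any length in the range
   is handled with at most four stores.  */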
4354 bool
4355 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4356 unsigned HOST_WIDE_INT min_len,
4357 unsigned HOST_WIDE_INT max_len,
4358 rtx val, char valc, unsigned int align)
4360 int max_bits = floor_log2 (max_len);
4361 int min_bits = floor_log2 (min_len);
4362 int sctz_len = ctz_len;
4364 gcc_checking_assert (sctz_len >= 0);
4366 if (val)
4367 valc = 1;
4369 /* Bits more significant than TST_BITS are part of the shared prefix
4370 in the binary representation of both min_len and max_len. Since
4371 they're identical, we don't need to test them in the loop. */
4372 int tst_bits = (max_bits != min_bits ? max_bits
4373 : floor_log2 (max_len ^ min_len));
4375 /* Save the pre-blksize values. */
4376 int orig_max_bits = max_bits;
4377 int orig_tst_bits = tst_bits;
4379 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4380 bytes, to lower max_bits. In the unlikely case of a constant LEN
4381 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4382 single store_by_pieces, but otherwise, select the minimum multiple
4383 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
4384 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4385 unsigned HOST_WIDE_INT blksize;
4386 if (max_len > min_len)
4388 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4389 align / BITS_PER_UNIT);
4390 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4391 blksize &= ~(alrng - 1);
4393 else if (max_len == min_len)
4394 blksize = max_len;
4395 else
4396 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4397 return false;
4398 if (min_len >= blksize
4399 /* ??? Maybe try smaller fixed-prefix blksizes before
4400 punting? */
4401 && can_store_by_pieces (blksize, builtin_memset_read_str,
4402 &valc, align, true))
4404 min_len -= blksize;
4405 min_bits = floor_log2 (min_len);
4406 max_len -= blksize;
4407 max_bits = floor_log2 (max_len);
4409 tst_bits = (max_bits != min_bits ? max_bits
4410 : floor_log2 (max_len ^ min_len));
4412 else
4413 blksize = 0;
4415 /* Check that we can use store by pieces for the maximum store count
4416 we may issue (initial fixed-size block, plus conditional
4417 power-of-two-sized stores from max_bits to ctz_len). */
4418 unsigned HOST_WIDE_INT xlenest = blksize;
4419 if (max_bits >= 0)
4420 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4421 - (HOST_WIDE_INT_1U << ctz_len));
4422 bool max_loop = false;
4423 bool use_store_by_pieces = true;
4424 /* Skip the test in case of overflow in xlenest. It shouldn't
4425 happen because of the way max_bits and blksize are related, but
4426 it doesn't hurt to test. */
4427 if (blksize > xlenest
4428 || !can_store_by_multiple_pieces (xlenest - blksize,
4429 builtin_memset_read_str,
4430 &valc, align, true, blksize))
4432 if (!(flag_inline_stringops & ILSOP_MEMSET))
4433 return false;
4435 for (max_bits = orig_max_bits;
4436 max_bits >= sctz_len;
4437 --max_bits)
4439 xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4440 - (HOST_WIDE_INT_1U << ctz_len));
4441 /* Check that blksize plus the bits to be stored as blocks
4442 sized at powers of two can be stored by pieces. This is
4443 like the test above, but with smaller max_bits. Skip
4444 orig_max_bits (it would be redundant). Also skip in case
4445 of overflow. */
4446 if (max_bits < orig_max_bits
4447 && xlenest + blksize >= xlenest
4448 && can_store_by_multiple_pieces (xlenest,
4449 builtin_memset_read_str,
4450 &valc, align, true, blksize))
4452 max_loop = true;
4453 break;
4455 if (blksize
4456 && can_store_by_multiple_pieces (xlenest,
4457 builtin_memset_read_str,
4458 &valc, align, true, 0))
4460 max_len += blksize;
4461 min_len += blksize;
4462 tst_bits = orig_tst_bits;
4463 blksize = 0;
4464 max_loop = true;
4465 break;
4467 if (max_bits == sctz_len)
4469 /* We'll get here if can_store_by_pieces refuses to
4470 store even a single QImode. We'll fall back to
4471 QImode stores then. */
4472 if (!sctz_len)
4474 blksize = 0;
4475 max_loop = true;
4476 use_store_by_pieces = false;
4477 break;
4479 --sctz_len;
4480 --ctz_len;
4483 if (!max_loop)
4484 return false;
4485 /* If the boundaries are such that min and max may run a
4486 different number of trips in the initial loop, the remainder
4487 need not be between the moduli, so set tst_bits to cover all
4488 bits. Otherwise, if the trip counts are the same, max_len
4489 has the common prefix, and the previously-computed tst_bits
4490 is usable. */
4491 if (max_len >> max_bits > min_len >> max_bits)
4492 tst_bits = max_bits;
4494 /* ??? Do we have to check that all powers of two lengths from
4495 max_bits down to ctz_len pass can_store_by_pieces? As in, could
4496 it possibly be that xlenest passes while smaller power-of-two
4497 sizes don't? */
4499 by_pieces_constfn constfun;
4500 void *constfundata;
4501 if (val)
4503 constfun = builtin_memset_gen_str;
4504 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4505 val);
4507 else
4509 constfun = builtin_memset_read_str;
4510 constfundata = &valc;
4513 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4514 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4515 to = replace_equiv_address (to, ptr);
4516 set_mem_align (to, align);
4518 if (blksize)
4520 to = store_by_pieces (to, blksize,
4521 constfun, constfundata,
4522 align, true,
4523 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4524 if (max_len == 0)
4525 return true;
4527 /* Adjust PTR, TO and REM. Since TO's address is likely
4528 PTR+offset, we have to replace it. */
4529 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4530 to = replace_equiv_address (to, ptr);
4531 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4532 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4535 /* Iterate over power-of-two block sizes from the maximum length to
4536 the least significant bit possibly set in the length. */
4537 for (int i = max_bits; i >= sctz_len; i--)
4539 rtx_code_label *loop_label = NULL;
4540 rtx_code_label *label = NULL;
4542 blksize = HOST_WIDE_INT_1U << i;
4544 /* If we're past the bits shared between min_ and max_len, expand
4545 a test on the dynamic length, comparing it with the
4546 BLKSIZE. */
4547 if (i <= tst_bits)
4549 label = gen_label_rtx ();
4550 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4551 ptr_mode, 1, label,
4552 profile_probability::even ());
4554 /* If we are at a bit that is in the prefix shared by min_ and
4555 max_len, skip the current BLKSIZE if the bit is clear, but do
4556 not skip the loop, even if it doesn't require
4557 prechecking. */
4558 else if ((max_len & blksize) == 0
4559 && !(max_loop && i == max_bits))
4560 continue;
4562 if (max_loop && i == max_bits)
4564 loop_label = gen_label_rtx ();
4565 emit_label (loop_label);
4566 /* Since we may run this multiple times, don't assume we
4567 know anything about the offset. */
4568 clear_mem_offset (to);
4571 bool update_needed = i != sctz_len || loop_label;
4572 rtx next_ptr = NULL_RTX;
4573 if (!use_store_by_pieces)
4575 gcc_checking_assert (blksize == 1);
4576 if (!val)
4577 val = gen_int_mode (valc, QImode);
4578 to = change_address (to, QImode, 0);
4579 emit_move_insn (to, val);
4580 if (update_needed)
4581 next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4583 else
4585 /* Issue a store of BLKSIZE bytes. */
4586 to = store_by_pieces (to, blksize,
4587 constfun, constfundata,
4588 align, true,
4589 update_needed ? RETURN_END : RETURN_BEGIN);
4590 next_ptr = XEXP (to, 0);
4592 /* Adjust REM and PTR, unless this is the last iteration. */
4593 if (update_needed)
4595 emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4596 to = replace_equiv_address (to, ptr);
4597 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4598 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4601 if (loop_label)
4602 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4603 ptr_mode, 1, loop_label,
4604 profile_probability::likely ());
4606 if (label)
4608 emit_label (label);
4610 /* Given conditional stores, the offset can no longer be
4611 known, so clear it. */
4612 clear_mem_offset (to);
4616 return true;
4619 /* Helper function to do the actual work for expand_builtin_memset. The
4620 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4621 so that this can also be called without constructing an actual CALL_EXPR.
4622 The other arguments and return value are the same as for
4623 expand_builtin_memset. */
4625 static rtx
4626 expand_builtin_memset_args (tree dest, tree val, tree len,
4627 rtx target, machine_mode mode, tree orig_exp)
4629 tree fndecl, fn;
4630 enum built_in_function fcode;
4631 machine_mode val_mode;
4632 char c;
4633 unsigned int dest_align;
4634 rtx dest_mem, dest_addr, len_rtx;
4635 HOST_WIDE_INT expected_size = -1;
4636 unsigned int expected_align = 0;
4637 unsigned HOST_WIDE_INT min_size;
4638 unsigned HOST_WIDE_INT max_size;
4639 unsigned HOST_WIDE_INT probable_max_size;
4641 dest_align = get_pointer_alignment (dest);
4643 /* If DEST is not a pointer type, don't do this operation in-line. */
4644 if (dest_align == 0)
4645 return NULL_RTX;
4647 if (currently_expanding_gimple_stmt)
4648 stringop_block_profile (currently_expanding_gimple_stmt,
4649 &expected_align, &expected_size);
4651 if (expected_align < dest_align)
4652 expected_align = dest_align;
4654 /* If the LEN parameter is zero, return DEST. */
4655 if (integer_zerop (len))
4657 /* Evaluate and ignore VAL in case it has side-effects. */
4658 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4659 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4662 /* Stabilize the arguments in case we fail. */
4663 dest = builtin_save_expr (dest);
4664 val = builtin_save_expr (val);
4665 len = builtin_save_expr (len);
4667 len_rtx = expand_normal (len);
4668 determine_block_size (len, len_rtx, &min_size, &max_size,
4669 &probable_max_size);
4670 dest_mem = get_memory_rtx (dest, len);
4671 val_mode = TYPE_MODE (unsigned_char_type_node);
4673 if (TREE_CODE (val) != INTEGER_CST
4674 || target_char_cast (val, &c))
4676 rtx val_rtx;
4678 val_rtx = expand_normal (val);
4679 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4681 /* Assume that we can memset by pieces if we can store
4682 the coefficients by pieces (in the required modes).
4683 We can't pass builtin_memset_gen_str as that emits RTL. */
4684 c = 1;
4685 if (tree_fits_uhwi_p (len)
4686 && can_store_by_pieces (tree_to_uhwi (len),
4687 builtin_memset_read_str, &c, dest_align,
4688 true))
4690 val_rtx = force_reg (val_mode, val_rtx);
4691 store_by_pieces (dest_mem, tree_to_uhwi (len),
4692 builtin_memset_gen_str, val_rtx, dest_align,
4693 true, RETURN_BEGIN);
4695 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4696 dest_align, expected_align,
4697 expected_size, min_size, max_size,
4698 probable_max_size)
4699 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4700 tree_ctz (len),
4701 min_size, max_size,
4702 val_rtx, 0,
4703 dest_align))
4704 goto do_libcall;
4706 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4707 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4708 return dest_mem;
4711 if (c)
4713 if (tree_fits_uhwi_p (len)
4714 && can_store_by_pieces (tree_to_uhwi (len),
4715 builtin_memset_read_str, &c, dest_align,
4716 true))
4717 store_by_pieces (dest_mem, tree_to_uhwi (len),
4718 builtin_memset_read_str, &c, dest_align, true,
4719 RETURN_BEGIN);
4720 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4721 gen_int_mode (c, val_mode),
4722 dest_align, expected_align,
4723 expected_size, min_size, max_size,
4724 probable_max_size)
4725 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4726 tree_ctz (len),
4727 min_size, max_size,
4728 NULL_RTX, c,
4729 dest_align))
4730 goto do_libcall;
4732 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4733 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4734 return dest_mem;
4737 set_mem_align (dest_mem, dest_align);
4738 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4739 CALL_EXPR_TAILCALL (orig_exp)
4740 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4741 expected_align, expected_size,
4742 min_size, max_size,
4743 probable_max_size, tree_ctz (len));
4745 if (dest_addr == 0)
4747 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4748 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4751 return dest_addr;
4753 do_libcall:
4754 fndecl = get_callee_fndecl (orig_exp);
4755 fcode = DECL_FUNCTION_CODE (fndecl);
4756 if (fcode == BUILT_IN_MEMSET)
4757 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4758 dest, val, len);
4759 else if (fcode == BUILT_IN_BZERO)
4760 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4761 dest, len);
4762 else
4763 gcc_unreachable ();
4764 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4765 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4766 return expand_call (fn, target, target == const0_rtx);
4769 /* Expand expression EXP, which is a call to the bzero builtin. Return
4770 NULL_RTX if we failed; the caller should emit a normal call. */
4772 static rtx
4773 expand_builtin_bzero (tree exp)
4775 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4776 return NULL_RTX;
4778 tree dest = CALL_EXPR_ARG (exp, 0);
4779 tree size = CALL_EXPR_ARG (exp, 1);
4781 /* New argument list transforming bzero(ptr x, int y) to
4782 memset(ptr x, int 0, size_t y). This is done this way
4783 so that if it isn't expanded inline, we fall back to
4784 calling bzero instead of memset. */
4786 location_t loc = EXPR_LOCATION (exp);
4788 return expand_builtin_memset_args (dest, integer_zero_node,
4789 fold_convert_loc (loc,
4790 size_type_node, size),
4791 const0_rtx, VOIDmode, exp);
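/* E.g. (a sketch):

     bzero (p, n);

   is expanded through expand_builtin_memset_args as if it had been written
   memset (p, 0, (size_t) n), but if inline expansion fails the emitted
   library call is still bzero, not memset.  */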
4794 /* Try to expand cmpstr operation ICODE with the given operands.
4795 Return the result rtx on success, otherwise return null. */
4797 static rtx
4798 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4799 HOST_WIDE_INT align)
4801 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4803 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4804 target = NULL_RTX;
4806 class expand_operand ops[4];
4807 create_output_operand (&ops[0], target, insn_mode);
4808 create_fixed_operand (&ops[1], arg1_rtx);
4809 create_fixed_operand (&ops[2], arg2_rtx);
4810 create_integer_operand (&ops[3], align);
4811 if (maybe_expand_insn (icode, 4, ops))
4812 return ops[0].value;
4813 return NULL_RTX;
4816 /* Expand expression EXP, which is a call to the memcmp built-in function.
4817 Return NULL_RTX if we failed and the caller should emit a normal call,
4818 otherwise try to get the result in TARGET, if convenient.
4819 RESULT_EQ is true if we can relax the returned value to be either zero
4820 or nonzero, without caring about the sign. */
4822 static rtx
4823 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4825 if (!validate_arglist (exp,
4826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4827 return NULL_RTX;
4829 tree arg1 = CALL_EXPR_ARG (exp, 0);
4830 tree arg2 = CALL_EXPR_ARG (exp, 1);
4831 tree len = CALL_EXPR_ARG (exp, 2);
4833 /* Due to the performance benefit, always inline the calls first
4834 when result_eq is false. */
4835 rtx result = NULL_RTX;
4836 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4837 if (!result_eq && fcode != BUILT_IN_BCMP)
4839 result = inline_expand_builtin_bytecmp (exp, target);
4840 if (result)
4841 return result;
4844 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4845 location_t loc = EXPR_LOCATION (exp);
4847 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4848 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4850 /* If we don't have POINTER_TYPE, call the function. */
4851 if (arg1_align == 0 || arg2_align == 0)
4852 return NULL_RTX;
4854 rtx arg1_rtx = get_memory_rtx (arg1, len);
4855 rtx arg2_rtx = get_memory_rtx (arg2, len);
4856 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4858 /* Set MEM_SIZE as appropriate. */
4859 if (CONST_INT_P (len_rtx))
4861 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4862 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4865 by_pieces_constfn constfn = NULL;
4867 /* Try to get the byte representation of the constant ARG2 (or, only
4868 when the function's result is used for equality to zero, ARG1)
4869 points to, with its byte size in NBYTES. */
4870 unsigned HOST_WIDE_INT nbytes;
4871 const char *rep = getbyterep (arg2, &nbytes);
4872 if (result_eq && rep == NULL)
4874 /* For equality to zero the arguments are interchangeable. */
4875 rep = getbyterep (arg1, &nbytes);
4876 if (rep != NULL)
4877 std::swap (arg1_rtx, arg2_rtx);
4880 /* If the function's constant bound LEN_RTX is less than or equal
4881 to the byte size of the representation of the constant argument,
4882 and if block move would be done by pieces, we can avoid loading
4883 the bytes from memory and only store the computed constant result. */
4884 if (rep
4885 && CONST_INT_P (len_rtx)
4886 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4887 constfn = builtin_memcpy_read_str;
4889 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4890 TREE_TYPE (len), target,
4891 result_eq, constfn,
4892 CONST_CAST (char *, rep),
4893 tree_ctz (len));
4895 if (result)
4897 /* Return the value in the proper mode for this function. */
4898 if (GET_MODE (result) == mode)
4899 return result;
4901 if (target != 0)
4903 convert_move (target, result, 0);
4904 return target;
4907 return convert_to_mode (mode, result, 0);
4910 return NULL_RTX;
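/* E.g. (a sketch, assuming the comparison is done by pieces):

     memcmp (buf, "abc", 3)

   has a constant length no larger than the literal's representation, so
   builtin_memcpy_read_str feeds the bytes of "abc" to the comparison as
   compile-time constants instead of loading them from memory.  For
   equality-only uses (RESULT_EQ) the same trick works when either argument
   is the constant one.  */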
4913 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4914 if we failed; the caller should emit a normal call, otherwise try to get
4915 the result in TARGET, if convenient. */
4917 static rtx
4918 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4920 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4923 tree arg1 = CALL_EXPR_ARG (exp, 0);
4924 tree arg2 = CALL_EXPR_ARG (exp, 1);
4926 /* Due to the performance benefit, always inline the calls first. */
4927 rtx result = NULL_RTX;
4928 result = inline_expand_builtin_bytecmp (exp, target);
4929 if (result)
4930 return result;
4932 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4933 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4934 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4935 return NULL_RTX;
4937 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4938 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4940 /* If we don't have POINTER_TYPE, call the function. */
4941 if (arg1_align == 0 || arg2_align == 0)
4942 return NULL_RTX;
4944 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4945 arg1 = builtin_save_expr (arg1);
4946 arg2 = builtin_save_expr (arg2);
4948 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4949 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4951 /* Try to call cmpstrsi. */
4952 if (cmpstr_icode != CODE_FOR_nothing)
4953 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4954 MIN (arg1_align, arg2_align));
4956 /* Try to determine at least one length and call cmpstrnsi. */
4957 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4959 tree len;
4960 rtx arg3_rtx;
4962 tree len1 = c_strlen (arg1, 1);
4963 tree len2 = c_strlen (arg2, 1);
4965 if (len1)
4966 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4967 if (len2)
4968 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4970 /* If we don't have a constant length for the first, use the length
4971 of the second, if we know it. We don't require a constant for
4972 this case; some cost analysis could be done if both are available
4973 but neither is constant. For now, assume they're equally cheap,
4974 unless one has side effects. If both strings have constant lengths,
4975 use the smaller. */
4977 if (!len1)
4978 len = len2;
4979 else if (!len2)
4980 len = len1;
4981 else if (TREE_SIDE_EFFECTS (len1))
4982 len = len2;
4983 else if (TREE_SIDE_EFFECTS (len2))
4984 len = len1;
4985 else if (TREE_CODE (len1) != INTEGER_CST)
4986 len = len2;
4987 else if (TREE_CODE (len2) != INTEGER_CST)
4988 len = len1;
4989 else if (tree_int_cst_lt (len1, len2))
4990 len = len1;
4991 else
4992 len = len2;
4994 /* If both arguments have side effects, we cannot optimize. */
4995 if (len && !TREE_SIDE_EFFECTS (len))
4997 arg3_rtx = expand_normal (len);
4998 result = expand_cmpstrn_or_cmpmem
4999 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5000 arg3_rtx, MIN (arg1_align, arg2_align));
5004 tree fndecl = get_callee_fndecl (exp);
5005 if (result)
5007 /* Return the value in the proper mode for this function. */
5008 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5009 if (GET_MODE (result) == mode)
5010 return result;
5011 if (target == 0)
5012 return convert_to_mode (mode, result, 0);
5013 convert_move (target, result, 0);
5014 return target;
5017 /* Expand the library call ourselves using a stabilized argument
5018 list to avoid re-evaluating the function's arguments twice. */
5019 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5020 copy_warning (fn, exp);
5021 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5022 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5023 return expand_call (fn, target, target == const0_rtx);
5026 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5027 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5028 try to get the result in TARGET, if convenient. */
5030 static rtx
5031 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5032 ATTRIBUTE_UNUSED machine_mode mode)
5034 if (!validate_arglist (exp,
5035 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5036 return NULL_RTX;
5038 tree arg1 = CALL_EXPR_ARG (exp, 0);
5039 tree arg2 = CALL_EXPR_ARG (exp, 1);
5040 tree arg3 = CALL_EXPR_ARG (exp, 2);
5042 location_t loc = EXPR_LOCATION (exp);
5043 tree len1 = c_strlen (arg1, 1);
5044 tree len2 = c_strlen (arg2, 1);
5046 /* Due to the performance benefit, always inline the calls first. */
5047 rtx result = NULL_RTX;
5048 result = inline_expand_builtin_bytecmp (exp, target);
5049 if (result)
5050 return result;
5052 /* If c_strlen can determine an expression for one of the string
5053 lengths, and it doesn't have side effects, then emit cmpstrnsi
5054 using length MIN(strlen(string)+1, arg3). */
5055 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5056 if (cmpstrn_icode == CODE_FOR_nothing)
5057 return NULL_RTX;
5059 tree len;
5061 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5062 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5064 if (len1)
5065 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5066 if (len2)
5067 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5069 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5071 /* If we don't have a constant length for the first, use the length
5072 of the second, if we know it. If neither string is constant length,
5073 use the given length argument. We don't require a constant for
5074 this case; some cost analysis could be done if both are available
5075 but neither is constant. For now, assume they're equally cheap,
5076 unless one has side effects. If both strings have constant lengths,
5077 use the smaller. */
5079 if (!len1 && !len2)
5080 len = len3;
5081 else if (!len1)
5082 len = len2;
5083 else if (!len2)
5084 len = len1;
5085 else if (TREE_SIDE_EFFECTS (len1))
5086 len = len2;
5087 else if (TREE_SIDE_EFFECTS (len2))
5088 len = len1;
5089 else if (TREE_CODE (len1) != INTEGER_CST)
5090 len = len2;
5091 else if (TREE_CODE (len2) != INTEGER_CST)
5092 len = len1;
5093 else if (tree_int_cst_lt (len1, len2))
5094 len = len1;
5095 else
5096 len = len2;
5098 /* If we are not using the given length, we must incorporate it here.
5099 The actual new length parameter will be MIN(len,arg3) in this case. */
5100 if (len != len3)
5102 len = fold_convert_loc (loc, sizetype, len);
5103 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5105 rtx arg1_rtx = get_memory_rtx (arg1, len);
5106 rtx arg2_rtx = get_memory_rtx (arg2, len);
5107 rtx arg3_rtx = expand_normal (len);
5108 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5109 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5110 MIN (arg1_align, arg2_align));
5112 tree fndecl = get_callee_fndecl (exp);
5113 if (result)
5115 /* Return the value in the proper mode for this function. */
5116 mode = TYPE_MODE (TREE_TYPE (exp));
5117 if (GET_MODE (result) == mode)
5118 return result;
5119 if (target == 0)
5120 return convert_to_mode (mode, result, 0);
5121 convert_move (target, result, 0);
5122 return target;
5125 /* Expand the library call ourselves using a stabilized argument
5126 list to avoid re-evaluating the function's arguments twice. */
5127 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5128 copy_warning (call, exp);
5129 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5130 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5131 return expand_call (call, target, target == const0_rtx);
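/* Worked example of the length selection above (a sketch, assuming the
   target provides a cmpstrn pattern):

     strncmp (s, "hello", 32)

   gives len1 unknown, len2 = 6 (strlen + 1) and len3 = 32; LEN becomes
   MIN (6, 32) = 6, so at most six bytes are compared even though the caller
   passed a bound of 32.  */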
5134 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5135 if that's convenient. */
5138 expand_builtin_saveregs (void)
5140 rtx val;
5141 rtx_insn *seq;
5143 /* Don't do __builtin_saveregs more than once in a function.
5144 Save the result of the first call and reuse it. */
5145 if (saveregs_value != 0)
5146 return saveregs_value;
5148 /* When this function is called, it means that registers must be
5149 saved on entry to this function. So we migrate the call to the
5150 first insn of this function. */
5152 start_sequence ();
5154 /* Do whatever the machine needs done in this case. */
5155 val = targetm.calls.expand_builtin_saveregs ();
5157 seq = get_insns ();
5158 end_sequence ();
5160 saveregs_value = val;
5162 /* Put the insns after the NOTE that starts the function. If this
5163 is inside a start_sequence, make the outer-level insn chain current, so
5164 the code is placed at the start of the function. */
5165 push_topmost_sequence ();
5166 emit_insn_after (seq, entry_of_function ());
5167 pop_topmost_sequence ();
5169 return val;
5172 /* Expand a call to __builtin_next_arg. */
5174 static rtx
5175 expand_builtin_next_arg (void)
5177 /* Checking arguments is already done in fold_builtin_next_arg
5178 that must be called before this function. */
5179 return expand_binop (ptr_mode, add_optab,
5180 crtl->args.internal_arg_pointer,
5181 crtl->args.arg_offset_rtx,
5182 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5185 /* Make it easier for the backends by protecting the valist argument
5186 from multiple evaluations. */
5188 static tree
5189 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5191 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5193 /* The current way of determining the type of valist is completely
5194 bogus. We should have the information on the va builtin instead. */
5195 if (!vatype)
5196 vatype = targetm.fn_abi_va_list (cfun->decl);
5198 if (TREE_CODE (vatype) == ARRAY_TYPE)
5200 if (TREE_SIDE_EFFECTS (valist))
5201 valist = save_expr (valist);
5203 /* For this case, the backends will be expecting a pointer to
5204 vatype, but it's possible we've actually been given an array
5205 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5206 So fix it. */
5207 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5209 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5210 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5213 else
5215 tree pt = build_pointer_type (vatype);
5217 if (! needs_lvalue)
5219 if (! TREE_SIDE_EFFECTS (valist))
5220 return valist;
5222 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5223 TREE_SIDE_EFFECTS (valist) = 1;
5226 if (TREE_SIDE_EFFECTS (valist))
5227 valist = save_expr (valist);
5228 valist = fold_build2_loc (loc, MEM_REF,
5229 vatype, valist, build_int_cst (pt, 0));
5232 return valist;
5235 /* The "standard" definition of va_list is void*. */
5237 tree
5238 std_build_builtin_va_list (void)
5240 return ptr_type_node;
5243 /* The "standard" abi va_list is va_list_type_node. */
5245 tree
5246 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5248 return va_list_type_node;
5251 /* The "standard" type of va_list is va_list_type_node. */
5253 tree
5254 std_canonical_va_list_type (tree type)
5256 tree wtype, htype;
5258 wtype = va_list_type_node;
5259 htype = type;
5261 if (TREE_CODE (wtype) == ARRAY_TYPE)
5263 /* If va_list is an array type, the argument may have decayed
5264 to a pointer type, e.g. by being passed to another function.
5265 In that case, unwrap both types so that we can compare the
5266 underlying records. */
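	/* Editor's note, an illustration only (not from the original sources):
	   on targets where va_list is an array type, e.g. x86-64 where
	   __builtin_va_list is an array of one __va_list_tag struct, a
	   parameter declared as

		void vlog (const char *fmt, va_list ap);

	   receives a decayed pointer, so both types are unwrapped before the
	   TYPE_MAIN_VARIANT comparison below.  */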
5267 if (TREE_CODE (htype) == ARRAY_TYPE
5268 || POINTER_TYPE_P (htype))
5270 wtype = TREE_TYPE (wtype);
5271 htype = TREE_TYPE (htype);
5274 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5275 return va_list_type_node;
5277 return NULL_TREE;
5280 /* The "standard" implementation of va_start: just assign `nextarg' to
5281 the variable. */
5283 void
5284 std_expand_builtin_va_start (tree valist, rtx nextarg)
5286 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5287 convert_move (va_r, nextarg, 0);
5290 /* Expand EXP, a call to __builtin_va_start. */
5292 static rtx
5293 expand_builtin_va_start (tree exp)
5295 rtx nextarg;
5296 tree valist;
5297 location_t loc = EXPR_LOCATION (exp);
5299 if (call_expr_nargs (exp) < 2)
5301 error_at (loc, "too few arguments to function %<va_start%>");
5302 return const0_rtx;
5305 if (fold_builtin_next_arg (exp, true))
5306 return const0_rtx;
5308 nextarg = expand_builtin_next_arg ();
5309 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5311 if (targetm.expand_builtin_va_start)
5312 targetm.expand_builtin_va_start (valist, nextarg);
5313 else
5314 std_expand_builtin_va_start (valist, nextarg);
5316 return const0_rtx;
5319 /* Expand EXP, a call to __builtin_va_end. */
5321 static rtx
5322 expand_builtin_va_end (tree exp)
5324 tree valist = CALL_EXPR_ARG (exp, 0);
5326 /* Evaluate for side effects, if needed. I hate macros that don't
5327 do that. */
5328 if (TREE_SIDE_EFFECTS (valist))
5329 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5331 return const0_rtx;
5334 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5335 builtin rather than just as an assignment in stdarg.h because of the
5336 nastiness of array-type va_list types. */
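/* Editor's illustration (not part of the original sources): when va_list is
   an array type, a plain assignment in stdarg.h would be ill-formed, so the
   copy has to be a block move of the underlying object, roughly

	memcpy (dst, src, sizeof (va_list));

   which is what the BLKmode branch below implements via emit_block_move;
   the pointer-typed case really is a simple assignment.  */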
5338 static rtx
5339 expand_builtin_va_copy (tree exp)
5341 tree dst, src, t;
5342 location_t loc = EXPR_LOCATION (exp);
5344 dst = CALL_EXPR_ARG (exp, 0);
5345 src = CALL_EXPR_ARG (exp, 1);
5347 dst = stabilize_va_list_loc (loc, dst, 1);
5348 src = stabilize_va_list_loc (loc, src, 0);
5350 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5352 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5354 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5355 TREE_SIDE_EFFECTS (t) = 1;
5356 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5358 else
5360 rtx dstb, srcb, size;
5362 /* Evaluate to pointers. */
5363 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5364 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5365 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5366 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5368 dstb = convert_memory_address (Pmode, dstb);
5369 srcb = convert_memory_address (Pmode, srcb);
5371 /* "Dereference" to BLKmode memories. */
5372 dstb = gen_rtx_MEM (BLKmode, dstb);
5373 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5374 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5375 srcb = gen_rtx_MEM (BLKmode, srcb);
5376 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5377 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5379 /* Copy. */
5380 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5383 return const0_rtx;
5386 /* Expand a call to one of the builtin functions __builtin_frame_address or
5387 __builtin_return_address. */
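/* Editor's note, for illustration only (not from the original sources): a
   typical C-level use of these builtins is

	void *frame  = __builtin_frame_address (0);	// current frame
	void *caller = __builtin_return_address (0);	// current return address

   Both take a nonnegative constant frame count; anything beyond 0 is
   unreliable, which is why a nonzero argument draws -Wframe-address
   below.  */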
5389 static rtx
5390 expand_builtin_frame_address (tree fndecl, tree exp)
5392 /* The argument must be a nonnegative integer constant.
5393 It counts the number of frames to scan up the stack.
5394 The value is either the frame pointer value or the return
5395 address saved in that frame. */
5396 if (call_expr_nargs (exp) == 0)
5397 /* Warning about missing arg was already issued. */
5398 return const0_rtx;
5399 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5401 error ("invalid argument to %qD", fndecl);
5402 return const0_rtx;
5404 else
5406 /* Number of frames to scan up the stack. */
5407 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5409 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5411 /* Some ports cannot access arbitrary stack frames. */
5412 if (tem == NULL)
5414 warning (0, "unsupported argument to %qD", fndecl);
5415 return const0_rtx;
5418 if (count)
5420 /* Warn since no effort is made to ensure that any frame
5421 beyond the current one exists or can be safely reached. */
5422 warning (OPT_Wframe_address, "calling %qD with "
5423 "a nonzero argument is unsafe", fndecl);
5426 /* For __builtin_frame_address, return what we've got. */
5427 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5428 return tem;
5430 if (!REG_P (tem)
5431 && ! CONSTANT_P (tem))
5432 tem = copy_addr_to_reg (tem);
5433 return tem;
5437 #if ! STACK_GROWS_DOWNWARD
5438 # define STACK_TOPS GT
5439 #else
5440 # define STACK_TOPS LT
5441 #endif
5443 #ifdef POINTERS_EXTEND_UNSIGNED
5444 # define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5445 #else
5446 # define STACK_UNSIGNED true
5447 #endif
5449 /* Expand a call to builtin function __builtin_stack_address. */
5451 static rtx
5452 expand_builtin_stack_address ()
5454 rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
5455 STACK_UNSIGNED);
5457 /* Unbias the stack pointer, bringing it to the boundary between the
5458 stack area claimed by the active function calling this builtin,
5459 and stack ranges that could get clobbered if it called another
5460 function. It should NOT encompass any stack red zone, that is
5461 used in leaf functions.
5463 On SPARC, the register save area is *not* considered active or
5464 used by the active function, but rather as akin to the area in
5465 which call-preserved registers are saved by callees. This
5466 enables __strub_leave to clear what would otherwise overlap with
5467 its own register save area.
5469 If the address is computed too high or too low, parts of a stack
5470 range that should be scrubbed may be left unscrubbed, scrubbing
5471 may corrupt active portions of the stack frame, and stack ranges
5472 may be doubly-scrubbed by caller and callee.
5474 In order for it to be just right, the area delimited by
5475 @code{__builtin_stack_address} and @code{__builtin_frame_address
5476 (0)} should encompass caller's registers saved by the function,
5477 local on-stack variables and @code{alloca} stack areas.
5478 Accumulated outgoing on-stack arguments, preallocated as part of
5479 a function's own prologue, are to be regarded as part of the
5480 (caller) function's active area as well, whereas those pushed or
5481 allocated temporarily for a call are regarded as part of the
5482 callee's stack range, rather than the caller's. */
5483 ret = plus_constant (ptr_mode, ret, STACK_POINTER_OFFSET);
5485 return force_reg (ptr_mode, ret);
5488 /* Expand a call to builtin function __builtin_strub_enter. */
5490 static rtx
5491 expand_builtin_strub_enter (tree exp)
5493 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5494 return NULL_RTX;
5496 if (optimize < 1 || flag_no_inline)
5497 return NULL_RTX;
5499 rtx stktop = expand_builtin_stack_address ();
5501 tree wmptr = CALL_EXPR_ARG (exp, 0);
5502 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5503 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5504 build_int_cst (TREE_TYPE (wmptr), 0));
5505 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5507 emit_move_insn (wmark, stktop);
5509 return const0_rtx;
5512 /* Expand a call to builtin function __builtin_strub_update. */
5514 static rtx
5515 expand_builtin_strub_update (tree exp)
5517 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5518 return NULL_RTX;
5520 if (optimize < 2 || flag_no_inline)
5521 return NULL_RTX;
5523 rtx stktop = expand_builtin_stack_address ();
5525 #ifdef RED_ZONE_SIZE
5526 /* Here's how the strub enter, update and leave functions deal with red zones.
5528 If it weren't for red zones, update, called from within a strub context,
5529 would bump the watermark to the top of the stack. Enter and leave, running
5530 in the caller, would use the caller's top of stack address both to
5531 initialize the watermark passed to the callee, and to start strubbing the
5532 stack afterwards.
5534 Ideally, we'd update the watermark so as to cover the used amount of red
5535 zone, and strub starting at the caller's other end of the (presumably
5536 unused) red zone. Normally, only leaf functions use the red zone, but at
5537 this point we can't tell whether a function is a leaf, nor can we tell how
5538 much of the red zone it uses. Furthermore, some strub contexts may have
5539 been inlined so that update and leave are called from the same stack frame,
5540 and the strub builtins may all have been inlined, turning a strub function
5541 into a leaf.
5543 So cleaning the range from the caller's stack pointer (one end of the red
5544 zone) to the (potentially inlined) callee's (other end of the) red zone
5545 could scribble over the caller's own red zone.
5547 We avoid this possibility by arranging for callers that are strub contexts
5548 to use their own watermark as the strub starting point. So, if A calls B,
5549 and B calls C, B will tell A to strub up to the end of B's red zone, and
5550 will strub itself only the part of C's stack frame and red zone that
5551 doesn't overlap with B's. With that, we don't need to know who's leaf and
5552 who isn't: inlined calls will shrink their strub window to zero, each
5553 remaining call will strub some portion of the stack, and eventually the
5554 strub context will return to a caller that isn't a strub context itself,
5555 that will therefore use its own stack pointer as the strub starting point.
5556 It's not a leaf, because strub contexts can't be inlined into non-strub
5557 contexts, so it doesn't use the red zone, and it will therefore correctly
5558    strub the callee's stack frame up to the end of the callee's red zone.
5559 Neat! */
5560 if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5562 poly_int64 red_zone_size = RED_ZONE_SIZE;
5563 #if STACK_GROWS_DOWNWARD
5564 red_zone_size = -red_zone_size;
5565 #endif
5566 stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5567 stktop = force_reg (ptr_mode, stktop);
5569 #endif
5571 tree wmptr = CALL_EXPR_ARG (exp, 0);
5572 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5573 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5574 build_int_cst (TREE_TYPE (wmptr), 0));
5575 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5577 rtx wmarkr = force_reg (ptr_mode, wmark);
5579 rtx_code_label *lab = gen_label_rtx ();
5580 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5581 ptr_mode, NULL_RTX, lab, NULL,
5582 profile_probability::very_likely ());
5583 emit_move_insn (wmark, stktop);
5585 /* If this is an inlined strub function, also bump the watermark for the
5586 enclosing function. This avoids a problem with the following scenario: A
5587 calls B and B calls C, and both B and C get inlined into A. B allocates
5588 temporary stack space before calling C. If we don't update A's watermark,
5589 we may use an outdated baseline for the post-C strub_leave, erasing B's
5590 temporary stack allocation. We only need this if we're fully expanding
5591 strub_leave inline. */
5592 tree xwmptr = (optimize > 2
5593 ? strub_watermark_parm (current_function_decl)
5594 : wmptr);
5595 if (wmptr != xwmptr)
5597 wmptr = xwmptr;
5598 wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5599 wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5600 build_int_cst (TREE_TYPE (wmptr), 0));
5601 wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5602 wmarkr = force_reg (ptr_mode, wmark);
5604 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5605 ptr_mode, NULL_RTX, lab, NULL,
5606 profile_probability::very_likely ());
5607 emit_move_insn (wmark, stktop);
5610 emit_label (lab);
5612 return const0_rtx;
5616 /* Expand a call to builtin function __builtin_strub_leave. */
5618 static rtx
5619 expand_builtin_strub_leave (tree exp)
5621 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5622 return NULL_RTX;
5624 if (optimize < 2 || optimize_size || flag_no_inline)
5625 return NULL_RTX;
5627 rtx stktop = NULL_RTX;
5629 if (tree wmptr = (optimize
5630 ? strub_watermark_parm (current_function_decl)
5631 : NULL_TREE))
5633 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5634 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5635 build_int_cst (TREE_TYPE (wmptr), 0));
5636 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5637 stktop = force_reg (ptr_mode, wmark);
5640 if (!stktop)
5641 stktop = expand_builtin_stack_address ();
5643 tree wmptr = CALL_EXPR_ARG (exp, 0);
5644 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5645 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5646 build_int_cst (TREE_TYPE (wmptr), 0));
5647 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5649 rtx wmarkr = force_reg (ptr_mode, wmark);
5651 #if ! STACK_GROWS_DOWNWARD
5652 rtx base = stktop;
5653 rtx end = wmarkr;
5654 #else
5655 rtx base = wmarkr;
5656 rtx end = stktop;
5657 #endif
5659 /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5660 base = copy_to_reg (base);
5662 rtx_code_label *done = gen_label_rtx ();
5663 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5664 ptr_mode, NULL_RTX, done, NULL,
5665 profile_probability::very_likely ());
5667 if (optimize < 3)
5668 expand_call (exp, NULL_RTX, true);
5669 else
5671 /* Ok, now we've determined we want to copy the block, so convert the
5672 addresses to Pmode, as needed to dereference them to access ptr_mode
5673 memory locations, so that we don't have to convert anything within the
5674 loop. */
5675 base = memory_address (ptr_mode, base);
5676 end = memory_address (ptr_mode, end);
5678 rtx zero = force_operand (const0_rtx, NULL_RTX);
5679 int ulen = GET_MODE_SIZE (ptr_mode);
5681 /* ??? It would be nice to use setmem or similar patterns here,
5682 but they do not necessarily obey the stack growth direction,
5683 which has security implications. We also have to avoid calls
5684 (memset, bzero or any machine-specific ones), which are
5685 likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5686 #if ! STACK_GROWS_DOWNWARD
5687 rtx incr = plus_constant (Pmode, base, ulen);
5688 rtx dstm = gen_rtx_MEM (ptr_mode, base);
5690 rtx_code_label *loop = gen_label_rtx ();
5691 emit_label (loop);
5692 emit_move_insn (dstm, zero);
5693 emit_move_insn (base, force_operand (incr, NULL_RTX));
5694 #else
5695 rtx decr = plus_constant (Pmode, end, -ulen);
5696 rtx dstm = gen_rtx_MEM (ptr_mode, end);
5698 rtx_code_label *loop = gen_label_rtx ();
5699 emit_label (loop);
5700 emit_move_insn (end, force_operand (decr, NULL_RTX));
5701 emit_move_insn (dstm, zero);
5702 #endif
5703 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5704 Pmode, NULL_RTX, NULL, loop,
5705 profile_probability::very_likely ());
5708 emit_label (done);
5710 return const0_rtx;
5713 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5714 failed and the caller should emit a normal call. */
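/* Editor's note, for illustration only: the three argument shapes validated
   below correspond roughly to the user-level forms

	__builtin_alloca (size);
	__builtin_alloca_with_align (size, align_in_bits);
	__builtin_alloca_with_align_and_max (size, align_in_bits, max_size);

   where the alignment argument is a compile-time constant expressed in
   bits.  */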
5716 static rtx
5717 expand_builtin_alloca (tree exp)
5719 rtx op0;
5720 rtx result;
5721 unsigned int align;
5722 tree fndecl = get_callee_fndecl (exp);
5723 HOST_WIDE_INT max_size;
5724 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5725 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5726 bool valid_arglist
5727 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5728 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5729 VOID_TYPE)
5730 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5731 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5732 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5734 if (!valid_arglist)
5735 return NULL_RTX;
5737 /* Compute the argument. */
5738 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5740 /* Compute the alignment. */
5741 align = (fcode == BUILT_IN_ALLOCA
5742 ? BIGGEST_ALIGNMENT
5743 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5745 /* Compute the maximum size. */
5746 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5747 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5748 : -1);
5750 /* Allocate the desired space. If the allocation stems from the declaration
5751 of a variable-sized object, it cannot accumulate. */
5752 result
5753 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5754 result = convert_memory_address (ptr_mode, result);
5756 /* Dynamic allocations for variables are recorded during gimplification. */
5757 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5758 record_dynamic_alloc (exp);
5760 return result;
5763 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5764 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5765 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5766 handle_builtin_stack_restore function. */
5768 static rtx
5769 expand_asan_emit_allocas_unpoison (tree exp)
5771 tree arg0 = CALL_EXPR_ARG (exp, 0);
5772 tree arg1 = CALL_EXPR_ARG (exp, 1);
5773 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5774 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5775 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5776 stack_pointer_rtx, NULL_RTX, 0,
5777 OPTAB_LIB_WIDEN);
5778 off = convert_modes (ptr_mode, Pmode, off, 0);
5779 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5780 OPTAB_LIB_WIDEN);
5781 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5782 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5783 top, ptr_mode, bot, ptr_mode);
5784 return ret;
5787 /* Expand a call to bswap builtin in EXP.
5788 Return NULL_RTX if a normal call should be emitted rather than expanding the
5789 function in-line. If convenient, the result should be placed in TARGET.
5790 SUBTARGET may be used as the target for computing one of EXP's operands. */
5792 static rtx
5793 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5794 rtx subtarget)
5796 tree arg;
5797 rtx op0;
5799 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5800 return NULL_RTX;
5802 arg = CALL_EXPR_ARG (exp, 0);
5803 op0 = expand_expr (arg,
5804 subtarget && GET_MODE (subtarget) == target_mode
5805 ? subtarget : NULL_RTX,
5806 target_mode, EXPAND_NORMAL);
5807 if (GET_MODE (op0) != target_mode)
5808 op0 = convert_to_mode (target_mode, op0, 1);
5810 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5812 gcc_assert (target);
5814 return convert_to_mode (target_mode, target, 1);
5817 /* Expand a call to a unary builtin in EXP.
5818 Return NULL_RTX if a normal call should be emitted rather than expanding the
5819 function in-line. If convenient, the result should be placed in TARGET.
5820 SUBTARGET may be used as the target for computing one of EXP's operands. */
5822 static rtx
5823 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5824 rtx subtarget, optab op_optab)
5826 rtx op0;
5828 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5829 return NULL_RTX;
5831 /* Compute the argument. */
5832 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5833 (subtarget
5834 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5835 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5836 VOIDmode, EXPAND_NORMAL);
5837 /* Compute op, into TARGET if possible.
5838 Set TARGET to wherever the result comes back. */
5839 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5840 op_optab, op0, target, op_optab != clrsb_optab);
5841 gcc_assert (target);
5843 return convert_to_mode (target_mode, target, 0);
5846 /* Expand a call to __builtin_expect. We just return our argument
5847    as the builtin_expect semantics should already have been applied by
5848    the tree branch prediction pass.  */
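/* Editor's illustration (not from the original sources): at the C level the
   hint is typically written as

	if (__builtin_expect (ptr == NULL, 0))	// "unlikely" branch
	  handle_error ();

   By the time we get here the prediction has already been recorded by the
   tree branch prediction pass, so expansion only needs to return ARG.  */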
5850 static rtx
5851 expand_builtin_expect (tree exp, rtx target)
5853 tree arg;
5855 if (call_expr_nargs (exp) < 2)
5856 return const0_rtx;
5857 arg = CALL_EXPR_ARG (exp, 0);
5859 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5860 /* When guessing was done, the hints should be already stripped away. */
5861 gcc_assert (!flag_guess_branch_prob
5862 || optimize == 0 || seen_error ());
5863 return target;
5866 /* Expand a call to __builtin_expect_with_probability. We just return our
5867    argument as the builtin_expect semantics should already have been applied
5868    by the tree branch prediction pass.  */
5870 static rtx
5871 expand_builtin_expect_with_probability (tree exp, rtx target)
5873 tree arg;
5875 if (call_expr_nargs (exp) < 3)
5876 return const0_rtx;
5877 arg = CALL_EXPR_ARG (exp, 0);
5879 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5880 /* When guessing was done, the hints should be already stripped away. */
5881 gcc_assert (!flag_guess_branch_prob
5882 || optimize == 0 || seen_error ());
5883 return target;
5887 /* Expand a call to __builtin_assume_aligned. We just return our first
5888    argument as the builtin_assume_aligned semantics should already have been
5889    applied by CCP.  */
5891 static rtx
5892 expand_builtin_assume_aligned (tree exp, rtx target)
5894 if (call_expr_nargs (exp) < 2)
5895 return const0_rtx;
5896 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5897 EXPAND_NORMAL);
5898 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5899 && (call_expr_nargs (exp) < 3
5900 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5901 return target;
5904 void
5905 expand_builtin_trap (void)
5907 if (targetm.have_trap ())
5909 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5910 /* For trap insns when not accumulating outgoing args force
5911 REG_ARGS_SIZE note to prevent crossjumping of calls with
5912 different args sizes. */
5913 if (!ACCUMULATE_OUTGOING_ARGS)
5914 add_args_size_note (insn, stack_pointer_delta);
5916 else
5918 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5919 tree call_expr = build_call_expr (fn, 0);
5920 expand_call (call_expr, NULL_RTX, false);
5923 emit_barrier ();
5926 /* Expand a call to __builtin_unreachable. We do nothing except emit
5927 a barrier saying that control flow will not pass here.
5929 It is the responsibility of the program being compiled to ensure
5930    that control flow never reaches __builtin_unreachable.  */
5931 static void
5932 expand_builtin_unreachable (void)
5934 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5935 to avoid this. */
5936 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5937 emit_barrier ();
5940 /* Expand EXP, a call to fabs, fabsf or fabsl.
5941 Return NULL_RTX if a normal call should be emitted rather than expanding
5942 the function inline. If convenient, the result should be placed
5943 in TARGET. SUBTARGET may be used as the target for computing
5944 the operand. */
5946 static rtx
5947 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5949 machine_mode mode;
5950 tree arg;
5951 rtx op0;
5953 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5954 return NULL_RTX;
5956 arg = CALL_EXPR_ARG (exp, 0);
5957 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5958 mode = TYPE_MODE (TREE_TYPE (arg));
5959 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5960 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5963 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5964    Return NULL if a normal call should be emitted rather than expanding the
5965 function inline. If convenient, the result should be placed in TARGET.
5966 SUBTARGET may be used as the target for computing the operand. */
5968 static rtx
5969 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5971 rtx op0, op1;
5972 tree arg;
5974 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5975 return NULL_RTX;
5977 arg = CALL_EXPR_ARG (exp, 0);
5978 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5980 arg = CALL_EXPR_ARG (exp, 1);
5981 op1 = expand_normal (arg);
5983 return expand_copysign (op0, op1, target);
5986 /* Emit a call to __builtin___clear_cache. */
5988 void
5989 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5991 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5992 BUILTIN_ASM_NAME_PTR
5993 (BUILT_IN_CLEAR_CACHE));
5995 emit_library_call (callee,
5996 LCT_NORMAL, VOIDmode,
5997 convert_memory_address (ptr_mode, begin), ptr_mode,
5998 convert_memory_address (ptr_mode, end), ptr_mode);
6001 /* Emit a call to __builtin___clear_cache, unless the target specifies
6002 it as do-nothing. This function can be used by trampoline
6003 finalizers to duplicate the effects of expanding a call to the
6004 clear_cache builtin. */
6006 void
6007 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6009 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6010 || CONST_INT_P (begin))
6011 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6012 || CONST_INT_P (end)));
6014 if (targetm.have_clear_cache ())
6016 /* We have a "clear_cache" insn, and it will handle everything. */
6017 class expand_operand ops[2];
6019 create_address_operand (&ops[0], begin);
6020 create_address_operand (&ops[1], end);
6022 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6023 return;
6025 else
6027 #ifndef CLEAR_INSN_CACHE
6028 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6029 does nothing. There is no need to call it. Do nothing. */
6030 return;
6031 #endif /* CLEAR_INSN_CACHE */
6034 targetm.calls.emit_call_builtin___clear_cache (begin, end);
6037 /* Expand a call to __builtin___clear_cache. */
6039 static void
6040 expand_builtin___clear_cache (tree exp)
6042 tree begin, end;
6043 rtx begin_rtx, end_rtx;
6045 /* We must not expand to a library call. If we did, any
6046 fallback library function in libgcc that might contain a call to
6047 __builtin___clear_cache() would recurse infinitely. */
6048 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6050 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6051 return;
6054 begin = CALL_EXPR_ARG (exp, 0);
6055 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6057 end = CALL_EXPR_ARG (exp, 1);
6058 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6060 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
6063 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
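/* Editor's note, for illustration only: the rounding below is the usual
   power-of-two round-up, which in plain C would read roughly

	uintptr_t a = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT;	/* power of two */
	addr = (addr + a - 1) & -a;

   expressed here with explicit PLUS and AND rtx operations.  */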
6065 static rtx
6066 round_trampoline_addr (rtx tramp)
6068 rtx temp, addend, mask;
6070 /* If we don't need too much alignment, we'll have been guaranteed
6071 proper alignment by get_trampoline_type. */
6072 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6073 return tramp;
6075 /* Round address up to desired boundary. */
6076 temp = gen_reg_rtx (Pmode);
6077 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6078 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6080 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6081 temp, 0, OPTAB_LIB_WIDEN);
6082 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6083 temp, 0, OPTAB_LIB_WIDEN);
6085 return tramp;
6088 static rtx
6089 expand_builtin_init_trampoline (tree exp, bool onstack)
6091 tree t_tramp, t_func, t_chain;
6092 rtx m_tramp, r_tramp, r_chain, tmp;
6094 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6095 POINTER_TYPE, VOID_TYPE))
6096 return NULL_RTX;
6098 t_tramp = CALL_EXPR_ARG (exp, 0);
6099 t_func = CALL_EXPR_ARG (exp, 1);
6100 t_chain = CALL_EXPR_ARG (exp, 2);
6102 r_tramp = expand_normal (t_tramp);
6103 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6104 MEM_NOTRAP_P (m_tramp) = 1;
6106 /* If ONSTACK, the TRAMP argument should be the address of a field
6107 within the local function's FRAME decl. Either way, let's see if
6108 we can fill in the MEM_ATTRs for this memory. */
6109 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6110 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6112 /* Creator of a heap trampoline is responsible for making sure the
6113 address is aligned to at least STACK_BOUNDARY. Normally malloc
6114 will ensure this anyhow. */
6115 tmp = round_trampoline_addr (r_tramp);
6116 if (tmp != r_tramp)
6118 m_tramp = change_address (m_tramp, BLKmode, tmp);
6119 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6120 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6123 /* The FUNC argument should be the address of the nested function.
6124 Extract the actual function decl to pass to the hook. */
6125 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6126 t_func = TREE_OPERAND (t_func, 0);
6127 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6129 r_chain = expand_normal (t_chain);
6131 /* Generate insns to initialize the trampoline. */
6132 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6134 if (onstack)
6136 trampolines_created = 1;
6138 if (targetm.calls.custom_function_descriptors != 0)
6139 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6140 "trampoline generated for nested function %qD", t_func);
6143 return const0_rtx;
6146 static rtx
6147 expand_builtin_adjust_trampoline (tree exp)
6149 rtx tramp;
6151 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6152 return NULL_RTX;
6154 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6155 tramp = round_trampoline_addr (tramp);
6156 if (targetm.calls.trampoline_adjust_address)
6157 tramp = targetm.calls.trampoline_adjust_address (tramp);
6159 return tramp;
6162 /* Expand a call to the builtin descriptor initialization routine.
6163 A descriptor is made up of a couple of pointers to the static
6164 chain and the code entry in this order. */
6166 static rtx
6167 expand_builtin_init_descriptor (tree exp)
6169 tree t_descr, t_func, t_chain;
6170 rtx m_descr, r_descr, r_func, r_chain;
6172 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6173 VOID_TYPE))
6174 return NULL_RTX;
6176 t_descr = CALL_EXPR_ARG (exp, 0);
6177 t_func = CALL_EXPR_ARG (exp, 1);
6178 t_chain = CALL_EXPR_ARG (exp, 2);
6180 r_descr = expand_normal (t_descr);
6181 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6182 MEM_NOTRAP_P (m_descr) = 1;
6183 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6185 r_func = expand_normal (t_func);
6186 r_chain = expand_normal (t_chain);
6188 /* Generate insns to initialize the descriptor. */
6189 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6190 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6191 POINTER_SIZE / BITS_PER_UNIT), r_func);
6193 return const0_rtx;
6196 /* Expand a call to the builtin descriptor adjustment routine. */
6198 static rtx
6199 expand_builtin_adjust_descriptor (tree exp)
6201 rtx tramp;
6203 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6204 return NULL_RTX;
6206 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6208 /* Unalign the descriptor to allow runtime identification. */
6209 tramp = plus_constant (ptr_mode, tramp,
6210 targetm.calls.custom_function_descriptors);
6212 return force_operand (tramp, NULL_RTX);
6215 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6216 function. The function first checks whether the back end provides
6217 an insn to implement signbit for the respective mode. If not, it
6218 checks whether the floating point format of the value is such that
6219 the sign bit can be extracted. If that is not the case, error out.
6220 EXP is the expression that is a call to the builtin function; if
6221 convenient, the result should be placed in TARGET. */
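/* Editor's illustration (an assumption-labelled sketch, not from the
   sources): absent a signbit optab, the fallback below reinterprets the
   value as an integer and isolates the sign bit, roughly

	uint32_t bits;
	memcpy (&bits, &f, sizeof bits);	/* single-precision case */
	int sign = (bits >> 31) & 1;

   generalized below to the format's signbit position and word layout.  */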
6222 static rtx
6223 expand_builtin_signbit (tree exp, rtx target)
6225 const struct real_format *fmt;
6226 scalar_float_mode fmode;
6227 scalar_int_mode rmode, imode;
6228 tree arg;
6229 int word, bitpos;
6230 enum insn_code icode;
6231 rtx temp;
6232 location_t loc = EXPR_LOCATION (exp);
6234 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6235 return NULL_RTX;
6237 arg = CALL_EXPR_ARG (exp, 0);
6238 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6239 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6240 fmt = REAL_MODE_FORMAT (fmode);
6242 arg = builtin_save_expr (arg);
6244 /* Expand the argument yielding a RTX expression. */
6245 temp = expand_normal (arg);
6247 /* Check if the back end provides an insn that handles signbit for the
6248 argument's mode. */
6249 icode = optab_handler (signbit_optab, fmode);
6250 if (icode != CODE_FOR_nothing)
6252 rtx_insn *last = get_last_insn ();
6253 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6254 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6255 return this_target;
6256 delete_insns_since (last);
6259 /* For floating point formats without a sign bit, implement signbit
6260 as "ARG < 0.0". */
6261 bitpos = fmt->signbit_ro;
6262 if (bitpos < 0)
6264 /* But we can't do this if the format supports signed zero. */
6265 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6267 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6268 build_real (TREE_TYPE (arg), dconst0));
6269 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6272 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6274 imode = int_mode_for_mode (fmode).require ();
6275 temp = gen_lowpart (imode, temp);
6277 else
6279 imode = word_mode;
6280 /* Handle targets with different FP word orders. */
6281 if (FLOAT_WORDS_BIG_ENDIAN)
6282 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6283 else
6284 word = bitpos / BITS_PER_WORD;
6285 temp = operand_subword_force (temp, word, fmode);
6286 bitpos = bitpos % BITS_PER_WORD;
6289 /* Force the intermediate word_mode (or narrower) result into a
6290 register. This avoids attempting to create paradoxical SUBREGs
6291 of floating point modes below. */
6292 temp = force_reg (imode, temp);
6294 /* If the bitpos is within the "result mode" lowpart, the operation
6295    can be implemented with a single bitwise AND.  Otherwise, we need
6296 a right shift and an AND. */
6298 if (bitpos < GET_MODE_BITSIZE (rmode))
6300 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6302 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6303 temp = gen_lowpart (rmode, temp);
6304 temp = expand_binop (rmode, and_optab, temp,
6305 immed_wide_int_const (mask, rmode),
6306 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6308 else
6310 /* Perform a logical right shift to place the signbit in the least
6311 significant bit, then truncate the result to the desired mode
6312 and mask just this bit. */
6313 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6314 temp = gen_lowpart (rmode, temp);
6315 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6316 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6319 return temp;
6322 /* Expand fork or exec calls. TARGET is the desired target of the
6323 call. EXP is the call. FN is the
6324    identifier of the actual function.  IGNORE is nonzero if the
6325 value is to be ignored. */
6327 static rtx
6328 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6330 tree id, decl;
6331 tree call;
6333 /* If we are not profiling, just call the function. */
6334 if (!profile_arc_flag)
6335 return NULL_RTX;
6337 /* Otherwise call the wrapper. This should be equivalent for the rest of
6338 compiler, so the code does not diverge, and the wrapper may run the
6339 code necessary for keeping the profiling sane. */
6341 switch (DECL_FUNCTION_CODE (fn))
6343 case BUILT_IN_FORK:
6344 id = get_identifier ("__gcov_fork");
6345 break;
6347 case BUILT_IN_EXECL:
6348 id = get_identifier ("__gcov_execl");
6349 break;
6351 case BUILT_IN_EXECV:
6352 id = get_identifier ("__gcov_execv");
6353 break;
6355 case BUILT_IN_EXECLP:
6356 id = get_identifier ("__gcov_execlp");
6357 break;
6359 case BUILT_IN_EXECLE:
6360 id = get_identifier ("__gcov_execle");
6361 break;
6363 case BUILT_IN_EXECVP:
6364 id = get_identifier ("__gcov_execvp");
6365 break;
6367 case BUILT_IN_EXECVE:
6368 id = get_identifier ("__gcov_execve");
6369 break;
6371 default:
6372 gcc_unreachable ();
6375 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6376 FUNCTION_DECL, id, TREE_TYPE (fn));
6377 DECL_EXTERNAL (decl) = 1;
6378 TREE_PUBLIC (decl) = 1;
6379 DECL_ARTIFICIAL (decl) = 1;
6380 TREE_NOTHROW (decl) = 1;
6381 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6382 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6383 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6384 return expand_call (call, target, ignore);
6389 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6390 the pointer in these functions is void*, the tree optimizers may remove
6391 casts. The mode computed in expand_builtin isn't reliable either, due
6392 to __sync_bool_compare_and_swap.
6394 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6395 group of builtins. This gives us log2 of the mode size. */
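/* Editor's note, illustration only: for the _1/_2/_4/_8/_16 variants,
   FCODE_DIFF is 0/1/2/3/4 respectively, so BITS_PER_UNIT << FCODE_DIFF
   recovers the 8/16/32/64/128-bit integer mode.  */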
6397 static inline machine_mode
6398 get_builtin_sync_mode (int fcode_diff)
6400 /* The size is not negotiable, so ask not to get BLKmode in return
6401 if the target indicates that a smaller size would be better. */
6402 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6405 /* Expand the memory expression LOC and return the appropriate memory operand
6406 for the builtin_sync operations. */
6408 static rtx
6409 get_builtin_sync_mem (tree loc, machine_mode mode)
6411 rtx addr, mem;
6412 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6413 ? TREE_TYPE (TREE_TYPE (loc))
6414 : TREE_TYPE (loc));
6415 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6417 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6418 addr = convert_memory_address (addr_mode, addr);
6420 /* Note that we explicitly do not want any alias information for this
6421 memory, so that we kill all other live memories. Otherwise we don't
6422 satisfy the full barrier semantics of the intrinsic. */
6423 mem = gen_rtx_MEM (mode, addr);
6425 set_mem_addr_space (mem, addr_space);
6427 mem = validize_mem (mem);
6429 /* The alignment needs to be at least according to that of the mode. */
6430 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6431 get_pointer_alignment (loc)));
6432 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6433 MEM_VOLATILE_P (mem) = 1;
6435 return mem;
6438 /* Make sure an argument is in the right mode.
6439 EXP is the tree argument.
6440 MODE is the mode it should be in. */
6442 static rtx
6443 expand_expr_force_mode (tree exp, machine_mode mode)
6445 rtx val;
6446 machine_mode old_mode;
6448 if (TREE_CODE (exp) == SSA_NAME
6449 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6451 /* Undo argument promotion if possible, as combine might not
6452 be able to do it later due to MEM_VOLATILE_P uses in the
6453 patterns. */
6454 gimple *g = get_gimple_for_ssa_name (exp);
6455 if (g && gimple_assign_cast_p (g))
6457 tree rhs = gimple_assign_rhs1 (g);
6458 tree_code code = gimple_assign_rhs_code (g);
6459 if (CONVERT_EXPR_CODE_P (code)
6460 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6461 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6462 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6463 && (TYPE_PRECISION (TREE_TYPE (exp))
6464 > TYPE_PRECISION (TREE_TYPE (rhs))))
6465 exp = rhs;
6469 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6470 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6471 of CONST_INTs, where we know the old_mode only from the call argument. */
6473 old_mode = GET_MODE (val);
6474 if (old_mode == VOIDmode)
6475 old_mode = TYPE_MODE (TREE_TYPE (exp));
6476 val = convert_modes (mode, old_mode, val, 1);
6477 return val;
6481 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6482 EXP is the CALL_EXPR. CODE is the rtx code
6483 that corresponds to the arithmetic or logical operation from the name;
6484 an exception here is that NOT actually means NAND. TARGET is an optional
6485 place for us to store the results; AFTER is true if this is the
6486 fetch_and_xxx form. */
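/* Editor's illustration (not from the original sources): since GCC 4.4 the
   NAND forms are defined, roughly, as

	__sync_fetch_and_nand (p, v);	// returns old *p, then *p = ~(*p & v)
	__sync_nand_and_fetch (p, v);	// *p = ~(*p & v), returns new *p

   whereas pre-4.4 releases computed ~*p & v, hence the one-time warning
   emitted below under -Wsync-nand.  */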
6488 static rtx
6489 expand_builtin_sync_operation (machine_mode mode, tree exp,
6490 enum rtx_code code, bool after,
6491 rtx target)
6493 rtx val, mem;
6494 location_t loc = EXPR_LOCATION (exp);
6496 if (code == NOT && warn_sync_nand)
6498 tree fndecl = get_callee_fndecl (exp);
6499 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6501 static bool warned_f_a_n, warned_n_a_f;
6503 switch (fcode)
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6509 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6510 if (warned_f_a_n)
6511 break;
6513 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6514 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6515 warned_f_a_n = true;
6516 break;
6518 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6519 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6520 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6521 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6522 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6523 if (warned_n_a_f)
6524 break;
6526 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6527 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6528 warned_n_a_f = true;
6529 break;
6531 default:
6532 gcc_unreachable ();
6536 /* Expand the operands. */
6537 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6538 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6540 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6541 after);
6544 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6545 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6546 true if this is the boolean form. TARGET is a place for us to store the
6547 results; this is NOT optional if IS_BOOL is true. */
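/* Editor's note, illustration only: the two user-visible forms handled here
   are roughly

	bool __sync_bool_compare_and_swap (TYPE *p, TYPE oldv, TYPE newv);
	TYPE __sync_val_compare_and_swap  (TYPE *p, TYPE oldv, TYPE newv);

   the first reports whether the store happened, the second returns the
   value that was in *p before the operation.  */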
6549 static rtx
6550 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6551 bool is_bool, rtx target)
6553 rtx old_val, new_val, mem;
6554 rtx *pbool, *poval;
6556 /* Expand the operands. */
6557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6558 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6559 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6561 pbool = poval = NULL;
6562 if (target != const0_rtx)
6564 if (is_bool)
6565 pbool = &target;
6566 else
6567 poval = &target;
6569 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6570 false, MEMMODEL_SYNC_SEQ_CST,
6571 MEMMODEL_SYNC_SEQ_CST))
6572 return NULL_RTX;
6574 return target;
6577 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6578 general form is actually an atomic exchange, and some targets only
6579 support a reduced form with the second argument being a constant 1.
6580 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6581 the results. */
6583 static rtx
6584 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6585 rtx target)
6587 rtx val, mem;
6589 /* Expand the operands. */
6590 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6591 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6593 return expand_sync_lock_test_and_set (target, mem, val);
6596 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6598 static void
6599 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6601 rtx mem;
6603 /* Expand the operands. */
6604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6606 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6609 /* Given an integer representing an ``enum memmodel'', verify its
6610 correctness and return the memory model enum. */
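/* Editor's note, a hedged illustration only: the argument is one of the
   __ATOMIC_* constants supplied by the user, e.g.

	__atomic_load_n (&x, __ATOMIC_ACQUIRE);
	__atomic_store_n (&x, 1, __ATOMIC_RELEASE);

   Non-constant or out-of-range values fall back to __ATOMIC_SEQ_CST, and
   consume is promoted to acquire (see the PR 59448 workaround below).  */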
6612 static enum memmodel
6613 get_memmodel (tree exp)
6615 /* If the parameter is not a constant, it's a run time value so we'll just
6616 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6617 if (TREE_CODE (exp) != INTEGER_CST)
6618 return MEMMODEL_SEQ_CST;
6620 rtx op = expand_normal (exp);
6622 unsigned HOST_WIDE_INT val = INTVAL (op);
6623 if (targetm.memmodel_check)
6624 val = targetm.memmodel_check (val);
6625 else if (val & ~MEMMODEL_MASK)
6626 return MEMMODEL_SEQ_CST;
6628  /* Should never see a user-specified SYNC memory model, so >= LAST works.  */
6629 if (memmodel_base (val) >= MEMMODEL_LAST)
6630 return MEMMODEL_SEQ_CST;
6632 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6633 be conservative and promote consume to acquire. */
6634 if (val == MEMMODEL_CONSUME)
6635 val = MEMMODEL_ACQUIRE;
6637 return (enum memmodel) val;
6640 /* Expand the __atomic_exchange intrinsic:
6641 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6642 EXP is the CALL_EXPR.
6643 TARGET is an optional place for us to store the results. */
6645 static rtx
6646 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6648 rtx val, mem;
6649 enum memmodel model;
6651 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6653 if (!flag_inline_atomics)
6654 return NULL_RTX;
6656 /* Expand the operands. */
6657 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6658 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6660 return expand_atomic_exchange (target, mem, val, model);
6663 /* Expand the __atomic_compare_exchange intrinsic:
6664 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6665 TYPE desired, BOOL weak,
6666 enum memmodel success,
6667 enum memmodel failure)
6668 EXP is the CALL_EXPR.
6669 TARGET is an optional place for us to store the results. */
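/* Editor's illustration (not taken from the sources): the C-level contract
   being expanded is roughly

	TYPE expected = *expect;
	bool ok = __atomic_compare_exchange_n (obj, &expected, desired,
					       weak, success, failure);
	// on failure, expected now holds the value observed in *obj

   which is why the code below conditionally stores OLDVAL back to EXPECT.  */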
6671 static rtx
6672 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6673 rtx target)
6675 rtx expect, desired, mem, oldval;
6676 rtx_code_label *label;
6677 tree weak;
6678 bool is_weak;
6680 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6681 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6683 if (failure > success)
6684 success = MEMMODEL_SEQ_CST;
6686 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6688 failure = MEMMODEL_SEQ_CST;
6689 success = MEMMODEL_SEQ_CST;
6693 if (!flag_inline_atomics)
6694 return NULL_RTX;
6696 /* Expand the operands. */
6697 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6699 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6700 expect = convert_memory_address (Pmode, expect);
6701 expect = gen_rtx_MEM (mode, expect);
6702 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6704 weak = CALL_EXPR_ARG (exp, 3);
6705 is_weak = false;
6706 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6707 is_weak = true;
6709 if (target == const0_rtx)
6710 target = NULL;
6712  /* Lest the rtl backend create a race condition with an improper store
6713 to memory, always create a new pseudo for OLDVAL. */
6714 oldval = NULL;
6716 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6717 is_weak, success, failure))
6718 return NULL_RTX;
6720 /* Conditionally store back to EXPECT, lest we create a race condition
6721 with an improper store to memory. */
6722 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6723 the normal case where EXPECT is totally private, i.e. a register. At
6724 which point the store can be unconditional. */
6725 label = gen_label_rtx ();
6726 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6727 GET_MODE (target), 1, label);
6728 emit_move_insn (expect, oldval);
6729 emit_label (label);
6731 return target;
6734 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6735 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6736 call. The weak parameter must be dropped to match the expected parameter
6737 list and the expected argument changed from value to pointer to memory
6738 slot. */
6740 static void
6741 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6743 unsigned int z;
6744 vec<tree, va_gc> *vec;
6746 vec_alloc (vec, 5);
6747 vec->quick_push (gimple_call_arg (call, 0));
6748 tree expected = gimple_call_arg (call, 1);
6749 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6750 TREE_TYPE (expected));
6751 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6752 if (expd != x)
6753 emit_move_insn (x, expd);
6754 tree v = make_tree (TREE_TYPE (expected), x);
6755 vec->quick_push (build1 (ADDR_EXPR,
6756 build_pointer_type (TREE_TYPE (expected)), v));
6757 vec->quick_push (gimple_call_arg (call, 2));
6758 /* Skip the boolean weak parameter. */
6759 for (z = 4; z < 6; z++)
6760 vec->quick_push (gimple_call_arg (call, z));
6761 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6762 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6763 gcc_assert (bytes_log2 < 5);
6764 built_in_function fncode
6765 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6766 + bytes_log2);
6767 tree fndecl = builtin_decl_explicit (fncode);
6768 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6769 fndecl);
6770 tree exp = build_call_vec (boolean_type_node, fn, vec);
6771 tree lhs = gimple_call_lhs (call);
6772 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6773 if (lhs)
6775 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6776 if (GET_MODE (boolret) != mode)
6777 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6778 x = force_reg (mode, x);
6779 write_complex_part (target, boolret, true, true);
6780 write_complex_part (target, x, false, false);
6784 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6786 void
6787 expand_ifn_atomic_compare_exchange (gcall *call)
6789 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6790 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6791 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6793 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6794 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6796 if (failure > success)
6797 success = MEMMODEL_SEQ_CST;
6799 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6801 failure = MEMMODEL_SEQ_CST;
6802 success = MEMMODEL_SEQ_CST;
6805 if (!flag_inline_atomics)
6807 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6808 return;
6811 /* Expand the operands. */
6812 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6814 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6815 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6817 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6819 rtx boolret = NULL;
6820 rtx oldval = NULL;
6822 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6823 is_weak, success, failure))
6825 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6826 return;
6829 tree lhs = gimple_call_lhs (call);
6830 if (lhs)
6832 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6833 if (GET_MODE (boolret) != mode)
6834 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6835 write_complex_part (target, boolret, true, true);
6836 write_complex_part (target, oldval, false, false);
6840 /* Expand the __atomic_load intrinsic:
6841 TYPE __atomic_load (TYPE *object, enum memmodel)
6842 EXP is the CALL_EXPR.
6843 TARGET is an optional place for us to store the results. */
6845 static rtx
6846 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6848 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6849 if (is_mm_release (model) || is_mm_acq_rel (model))
6850 model = MEMMODEL_SEQ_CST;
6852 if (!flag_inline_atomics)
6853 return NULL_RTX;
6855 /* Expand the operand. */
6856 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6858 return expand_atomic_load (target, mem, model);
6862 /* Expand the __atomic_store intrinsic:
6863 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6864 EXP is the CALL_EXPR.
6865 TARGET is an optional place for us to store the results. */
6867 static rtx
6868 expand_builtin_atomic_store (machine_mode mode, tree exp)
6870 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6871 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6872 || is_mm_release (model)))
6873 model = MEMMODEL_SEQ_CST;
6875 if (!flag_inline_atomics)
6876 return NULL_RTX;
6878 /* Expand the operands. */
6879 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6880 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6882 return expand_atomic_store (mem, val, model, false);
6885 /* Expand the __atomic_fetch_XXX intrinsic:
6886 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6887 EXP is the CALL_EXPR.
6888 TARGET is an optional place for us to store the results.
6889    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6890 FETCH_AFTER is true if returning the result of the operation.
6891 FETCH_AFTER is false if returning the value before the operation.
6892 IGNORE is true if the result is not used.
6893 EXT_CALL is the correct builtin for an external call if this cannot be
6894 resolved to an instruction sequence. */
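/* Editor's note, for illustration: when only the fetch-before library call
   is available, the fetch-after result is reconstructed arithmetically,
   roughly

	after = before + val;		// PLUS, MINUS, AND, IOR, XOR
	after = ~(before & val);	// NOT, i.e. NAND

   which is what the trailing correction code below emits.  */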
6896 static rtx
6897 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6898 enum rtx_code code, bool fetch_after,
6899 bool ignore, enum built_in_function ext_call)
6901 rtx val, mem, ret;
6902 enum memmodel model;
6903 tree fndecl;
6904 tree addr;
6906 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6908 /* Expand the operands. */
6909 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6910 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6912 /* Only try generating instructions if inlining is turned on. */
6913 if (flag_inline_atomics)
6915 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6916 if (ret)
6917 return ret;
6920 /* Return if a different routine isn't needed for the library call. */
6921 if (ext_call == BUILT_IN_NONE)
6922 return NULL_RTX;
6924 /* Change the call to the specified function. */
6925 fndecl = get_callee_fndecl (exp);
6926 addr = CALL_EXPR_FN (exp);
6927 STRIP_NOPS (addr);
6929 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6930 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6932 /* If we will emit code after the call, the call cannot be a tail call.
6933 If it is emitted as a tail call, a barrier is emitted after it, and
6934 then all trailing code is removed. */
6935 if (!ignore)
6936 CALL_EXPR_TAILCALL (exp) = 0;
6938 /* Expand the call here so we can emit trailing code. */
6939 ret = expand_call (exp, target, ignore);
6941 /* Replace the original function just in case it matters. */
6942 TREE_OPERAND (addr, 0) = fndecl;
6944 /* Then issue the arithmetic correction to return the right result. */
6945 if (!ignore)
6947 if (code == NOT)
6949 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6950 OPTAB_LIB_WIDEN);
6951 ret = expand_simple_unop (mode, NOT, ret, target, true);
6953 else
6954 ret = expand_simple_binop (mode, code, ret, val, target, true,
6955 OPTAB_LIB_WIDEN);
6957 return ret;
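/* Illustrative sketch, not part of this file: the arithmetic correction above
   recomputes a fetch-after result from the fetch-before value returned by the
   external routine.  The helper below shows the same identity for the nand
   case in user code; its name is hypothetical.  */

static unsigned int
example_nand_fetch (unsigned int *p, unsigned int val)
{
  unsigned int old = __atomic_fetch_nand (p, val, __ATOMIC_SEQ_CST);
  /* Matches the NOT branch above: the new value is ~(old & val).  */
  return ~(old & val);
}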
6960 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6962 void
6963 expand_ifn_atomic_bit_test_and (gcall *call)
6965 tree ptr = gimple_call_arg (call, 0);
6966 tree bit = gimple_call_arg (call, 1);
6967 tree flag = gimple_call_arg (call, 2);
6968 tree lhs = gimple_call_lhs (call);
6969 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6970 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6971 enum rtx_code code;
6972 optab optab;
6973 class expand_operand ops[5];
6975 gcc_assert (flag_inline_atomics);
6977 if (gimple_call_num_args (call) == 5)
6978 model = get_memmodel (gimple_call_arg (call, 3));
6980 rtx mem = get_builtin_sync_mem (ptr, mode);
6981 rtx val = expand_expr_force_mode (bit, mode);
6983 switch (gimple_call_internal_fn (call))
6985 case IFN_ATOMIC_BIT_TEST_AND_SET:
6986 code = IOR;
6987 optab = atomic_bit_test_and_set_optab;
6988 break;
6989 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6990 code = XOR;
6991 optab = atomic_bit_test_and_complement_optab;
6992 break;
6993 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6994 code = AND;
6995 optab = atomic_bit_test_and_reset_optab;
6996 break;
6997 default:
6998 gcc_unreachable ();
7001 if (lhs == NULL_TREE)
7003 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
7004 val, NULL_RTX, true, OPTAB_DIRECT);
7005 if (code == AND)
7006 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
7007 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7008 return;
7011 rtx target;
7012 if (lhs)
7013 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7014 else
7015 target = gen_reg_rtx (mode);
7016 enum insn_code icode = direct_optab_handler (optab, mode);
7017 gcc_assert (icode != CODE_FOR_nothing);
7018 create_output_operand (&ops[0], target, mode);
7019 create_fixed_operand (&ops[1], mem);
7020 create_convert_operand_to (&ops[2], val, mode, true);
7021 create_integer_operand (&ops[3], model);
7022 create_integer_operand (&ops[4], integer_onep (flag));
7023 if (maybe_expand_insn (icode, 5, ops))
7024 return;
7026 rtx bitval = val;
7027 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7028 val, NULL_RTX, true, OPTAB_DIRECT);
7029 rtx maskval = val;
7030 if (code == AND)
7031 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7032 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7033 code, model, false);
7034 if (!result)
7036 bool is_atomic = gimple_call_num_args (call) == 5;
7037 tree tcall = gimple_call_arg (call, 3 + is_atomic);
7038 tree fndecl = gimple_call_addr_fndecl (tcall);
7039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7040 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
7041 make_tree (type, val),
7042 is_atomic
7043 ? gimple_call_arg (call, 3)
7044 : integer_zero_node);
7045 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7046 mode, !lhs);
7048 if (!lhs)
7049 return;
7050 if (integer_onep (flag))
7052 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7053 NULL_RTX, true, OPTAB_DIRECT);
7054 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7055 true, OPTAB_DIRECT);
7057 else
7058 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7059 OPTAB_DIRECT);
7060 if (result != target)
7061 emit_move_insn (target, result);
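/* Illustrative sketch, not part of this file: the kind of source-level idiom
   that is matched into IFN_ATOMIC_BIT_TEST_AND_SET (a fetch_or whose result is
   only masked by the same single bit) and expanded above; targets providing
   atomic_bit_test_and_set_optab can emit a single bit-test-and-set
   instruction.  The helper name is hypothetical.  */

static _Bool
example_test_and_set_bit (unsigned long *word, unsigned int bit)
{
  unsigned long mask = 1UL << bit;
  return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}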
7064 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
7066 void
7067 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7069 tree cmp = gimple_call_arg (call, 0);
7070 tree ptr = gimple_call_arg (call, 1);
7071 tree arg = gimple_call_arg (call, 2);
7072 tree lhs = gimple_call_lhs (call);
7073 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7074 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7075 optab optab;
7076 rtx_code code;
7077 class expand_operand ops[5];
7079 gcc_assert (flag_inline_atomics);
7081 if (gimple_call_num_args (call) == 5)
7082 model = get_memmodel (gimple_call_arg (call, 3));
7084 rtx mem = get_builtin_sync_mem (ptr, mode);
7085 rtx op = expand_expr_force_mode (arg, mode);
7087 switch (gimple_call_internal_fn (call))
7089 case IFN_ATOMIC_ADD_FETCH_CMP_0:
7090 code = PLUS;
7091 optab = atomic_add_fetch_cmp_0_optab;
7092 break;
7093 case IFN_ATOMIC_SUB_FETCH_CMP_0:
7094 code = MINUS;
7095 optab = atomic_sub_fetch_cmp_0_optab;
7096 break;
7097 case IFN_ATOMIC_AND_FETCH_CMP_0:
7098 code = AND;
7099 optab = atomic_and_fetch_cmp_0_optab;
7100 break;
7101 case IFN_ATOMIC_OR_FETCH_CMP_0:
7102 code = IOR;
7103 optab = atomic_or_fetch_cmp_0_optab;
7104 break;
7105 case IFN_ATOMIC_XOR_FETCH_CMP_0:
7106 code = XOR;
7107 optab = atomic_xor_fetch_cmp_0_optab;
7108 break;
7109 default:
7110 gcc_unreachable ();
7113 enum rtx_code comp = UNKNOWN;
7114 switch (tree_to_uhwi (cmp))
7116 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7117 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7118 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7119 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7120 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7121 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7122 default: gcc_unreachable ();
7125 rtx target;
7126 if (lhs == NULL_TREE)
7127 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7128 else
7129 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7130 enum insn_code icode = direct_optab_handler (optab, mode);
7131 gcc_assert (icode != CODE_FOR_nothing);
7132 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
7133 create_fixed_operand (&ops[1], mem);
7134 create_convert_operand_to (&ops[2], op, mode, true);
7135 create_integer_operand (&ops[3], model);
7136 create_integer_operand (&ops[4], comp);
7137 if (maybe_expand_insn (icode, 5, ops))
7138 return;
7140 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7141 code, model, true);
7142 if (!result)
7144 bool is_atomic = gimple_call_num_args (call) == 5;
7145 tree tcall = gimple_call_arg (call, 3 + is_atomic);
7146 tree fndecl = gimple_call_addr_fndecl (tcall);
7147 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7148 tree exp = build_call_nary (type, tcall,
7149 2 + is_atomic, ptr, arg,
7150 is_atomic
7151 ? gimple_call_arg (call, 3)
7152 : integer_zero_node);
7153 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7154 mode, !lhs);
7157 if (lhs)
7159 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7160 0, 1);
7161 if (result != target)
7162 emit_move_insn (target, result);
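/* Illustrative sketch, not part of this file: the source-level idiom behind
   IFN_ATOMIC_SUB_FETCH_CMP_0 and friends, an op-and-fetch whose only use is a
   comparison against zero.  The helper name is hypothetical.  */

static _Bool
example_dec_and_test (unsigned int *counter)
{
  return __atomic_sub_fetch (counter, 1, __ATOMIC_SEQ_CST) == 0;
}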
7166 /* Expand an atomic clear operation.
7167 void __atomic_clear (BOOL *obj, enum memmodel)
7168 EXP is the call expression. */
7170 static rtx
7171 expand_builtin_atomic_clear (tree exp)
7173 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7174 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7175 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7177 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7178 model = MEMMODEL_SEQ_CST;
7180 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7181 Failing that, a store is issued by __atomic_store. The only way this can
7182 fail is if the bool type is larger than a word size. Unlikely, but
7183 handle it anyway for completeness. Assume a single threaded model since
7184 there is no atomic support in this case, and no barriers are required. */
7185 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7186 if (!ret)
7187 emit_move_insn (mem, const0_rtx);
7188 return const0_rtx;
7191 /* Expand an atomic test_and_set operation.
7192 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7193 EXP is the call expression. */
7195 static rtx
7196 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7198 rtx mem;
7199 enum memmodel model;
7200 machine_mode mode;
7202 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7204 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7206 return expand_atomic_test_and_set (target, mem, model);
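/* Illustrative sketch, not part of this file: __atomic_test_and_set and
   __atomic_clear, as handled by the two expanders above, used as a minimal
   spinlock.  The helper names are hypothetical.  */

static void
example_lock (volatile char *flag)
{
  while (__atomic_test_and_set (flag, __ATOMIC_ACQUIRE))
    ; /* spin */
}

static void
example_unlock (volatile char *flag)
{
  __atomic_clear (flag, __ATOMIC_RELEASE);
}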
7210 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7211 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7213 static tree
7214 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7216 int size;
7217 machine_mode mode;
7218 unsigned int mode_align, type_align;
7220 if (TREE_CODE (arg0) != INTEGER_CST)
7221 return NULL_TREE;
7223 /* We need a corresponding integer mode for the access to be lock-free. */
7224 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7225 if (!int_mode_for_size (size, 0).exists (&mode))
7226 return boolean_false_node;
7228 mode_align = GET_MODE_ALIGNMENT (mode);
7230 if (TREE_CODE (arg1) == INTEGER_CST)
7232 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7234 /* Either this argument is null, or it's a fake pointer encoding
7235 the alignment of the object. */
7236 val = least_bit_hwi (val);
7237 val *= BITS_PER_UNIT;
7239 if (val == 0 || mode_align < val)
7240 type_align = mode_align;
7241 else
7242 type_align = val;
7244 else
7246 tree ttype = TREE_TYPE (arg1);
7248 /* This function is usually invoked and folded immediately by the front
7249 end before anything else has a chance to look at it. The pointer
7250 parameter at this point is usually cast to a void *, so check for that
7251 and look past the cast. */
7252 if (CONVERT_EXPR_P (arg1)
7253 && POINTER_TYPE_P (ttype)
7254 && VOID_TYPE_P (TREE_TYPE (ttype))
7255 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7256 arg1 = TREE_OPERAND (arg1, 0);
7258 ttype = TREE_TYPE (arg1);
7259 gcc_assert (POINTER_TYPE_P (ttype));
7261 /* Get the underlying type of the object. */
7262 ttype = TREE_TYPE (ttype);
7263 type_align = TYPE_ALIGN (ttype);
7266 /* If the object has smaller alignment, the lock free routines cannot
7267 be used. */
7268 if (type_align < mode_align)
7269 return boolean_false_node;
7271 /* Check if a compare_and_swap pattern exists for the mode which represents
7272 the required size. The pattern is not allowed to fail, so the existence
7273 of the pattern indicates support is present. Also require that an
7274 atomic load exists for the required size. */
7275 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7276 return boolean_true_node;
7277 else
7278 return boolean_false_node;
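/* Illustrative sketch, not part of this file: how the arguments of the folder
   above are supplied from user code.  The size must be a compile-time
   constant; a null second argument means "typical alignment for an object of
   that size", while a real pointer contributes the alignment of its
   pointed-to type.  On most targets both calls below fold to true, but that
   is target-dependent.  The helper name is hypothetical.  */

static void
example_always_lock_free (void)
{
  int word;
  _Bool with_object = __atomic_always_lock_free (sizeof (int), &word);
  _Bool typical_alignment = __atomic_always_lock_free (4, 0);
  (void) with_object;
  (void) typical_alignment;
}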
7281 /* Return true if the parameters to call EXP represent an object which will
7282 always generate lock free instructions. The first argument represents the
7283 size of the object, and the second parameter is a pointer to the object
7284 itself. If NULL is passed for the object, then the result is based on
7285 typical alignment for an object of the specified size. Otherwise return
7286 false. */
7288 static rtx
7289 expand_builtin_atomic_always_lock_free (tree exp)
7291 tree size;
7292 tree arg0 = CALL_EXPR_ARG (exp, 0);
7293 tree arg1 = CALL_EXPR_ARG (exp, 1);
7295 if (TREE_CODE (arg0) != INTEGER_CST)
7297 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7298 return const0_rtx;
7301 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7302 if (size == boolean_true_node)
7303 return const1_rtx;
7304 return const0_rtx;
7307 /* Return one or zero if it can be determined that object ARG1 of size ARG0
7308 is lock free on this architecture. */
7310 static tree
7311 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7313 if (!flag_inline_atomics)
7314 return NULL_TREE;
7316 /* If it isn't always lock free, don't generate a result. */
7317 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7318 return boolean_true_node;
7320 return NULL_TREE;
7323 /* Return true if the parameters to call EXP represent an object which will
7324 always generate lock free instructions. The first argument represents the
7325 size of the object, and the second parameter is a pointer to the object
7326 itself. If NULL is passed for the object, then the result is based on
7327 typical alignment for an object of the specified size. Otherwise return
7328 NULL.  */
7330 static rtx
7331 expand_builtin_atomic_is_lock_free (tree exp)
7333 tree size;
7334 tree arg0 = CALL_EXPR_ARG (exp, 0);
7335 tree arg1 = CALL_EXPR_ARG (exp, 1);
7337 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7339 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7340 return NULL_RTX;
7343 if (!flag_inline_atomics)
7344 return NULL_RTX;
7346 /* If the value is known at compile time, return the RTX for it. */
7347 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7348 if (size == boolean_true_node)
7349 return const1_rtx;
7351 return NULL_RTX;
7354 /* Expand the __atomic_thread_fence intrinsic:
7355 void __atomic_thread_fence (enum memmodel)
7356 EXP is the CALL_EXPR. */
7358 static void
7359 expand_builtin_atomic_thread_fence (tree exp)
7361 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7362 expand_mem_thread_fence (model);
7365 /* Expand the __atomic_signal_fence intrinsic:
7366 void __atomic_signal_fence (enum memmodel)
7367 EXP is the CALL_EXPR. */
7369 static void
7370 expand_builtin_atomic_signal_fence (tree exp)
7372 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7373 expand_mem_signal_fence (model);
7376 /* Expand the __sync_synchronize intrinsic. */
7378 static void
7379 expand_builtin_sync_synchronize (void)
7381 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7384 static rtx
7385 expand_builtin_thread_pointer (tree exp, rtx target)
7387 enum insn_code icode;
7388 if (!validate_arglist (exp, VOID_TYPE))
7389 return const0_rtx;
7390 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7391 if (icode != CODE_FOR_nothing)
7393 class expand_operand op;
7394 /* If the target is not suitable then create a new target. */
7395 if (target == NULL_RTX
7396 || !REG_P (target)
7397 || GET_MODE (target) != Pmode)
7398 target = gen_reg_rtx (Pmode);
7399 create_output_operand (&op, target, Pmode);
7400 expand_insn (icode, 1, &op);
7401 return target;
7403 error ("%<__builtin_thread_pointer%> is not supported on this target");
7404 return const0_rtx;
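/* Illustrative sketch, not part of this file: the builtin handled above simply
   yields the thread pointer on targets that provide get_thread_pointer_optab.
   The helper name is hypothetical.  */

static void *
example_thread_pointer (void)
{
  return __builtin_thread_pointer ();
}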
7407 static void
7408 expand_builtin_set_thread_pointer (tree exp)
7410 enum insn_code icode;
7411 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7412 return;
7413 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7414 if (icode != CODE_FOR_nothing)
7416 class expand_operand op;
7417 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7418 Pmode, EXPAND_NORMAL);
7419 create_input_operand (&op, val, Pmode);
7420 expand_insn (icode, 1, &op);
7421 return;
7423 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7427 /* Emit code to restore a previously saved value of the stack pointer. */
7429 static void
7430 expand_stack_restore (tree var)
7432 rtx_insn *prev;
7433 rtx sa = expand_normal (var);
7435 sa = convert_memory_address (Pmode, sa);
7437 prev = get_last_insn ();
7438 emit_stack_restore (SAVE_BLOCK, sa);
7440 record_new_stack_level ();
7442 fixup_args_size_notes (prev, get_last_insn (), 0);
7445 /* Emit code to save the current value of the stack pointer. */
7447 static rtx
7448 expand_stack_save (void)
7450 rtx ret = NULL_RTX;
7452 emit_stack_save (SAVE_BLOCK, &ret);
7453 return ret;
7456 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7458 static rtx
7459 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7461 const char *name;
7462 rtx fallback_retval;
7463 rtx_insn *(*gen_fn) (rtx, rtx);
7464 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7466 case BUILT_IN_GOACC_PARLEVEL_ID:
7467 name = "__builtin_goacc_parlevel_id";
7468 fallback_retval = const0_rtx;
7469 gen_fn = targetm.gen_oacc_dim_pos;
7470 break;
7471 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7472 name = "__builtin_goacc_parlevel_size";
7473 fallback_retval = const1_rtx;
7474 gen_fn = targetm.gen_oacc_dim_size;
7475 break;
7476 default:
7477 gcc_unreachable ();
7480 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7482 error ("%qs only supported in OpenACC code", name);
7483 return const0_rtx;
7486 tree arg = CALL_EXPR_ARG (exp, 0);
7487 if (TREE_CODE (arg) != INTEGER_CST)
7489 error ("non-constant argument 0 to %qs", name);
7490 return const0_rtx;
7493 int dim = TREE_INT_CST_LOW (arg);
7494 switch (dim)
7496 case GOMP_DIM_GANG:
7497 case GOMP_DIM_WORKER:
7498 case GOMP_DIM_VECTOR:
7499 break;
7500 default:
7501 error ("illegal argument 0 to %qs", name);
7502 return const0_rtx;
7505 if (ignore)
7506 return target;
7508 if (target == NULL_RTX)
7509 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7511 if (!targetm.have_oacc_dim_size ())
7513 emit_move_insn (target, fallback_retval);
7514 return target;
7517 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7518 emit_insn (gen_fn (reg, GEN_INT (dim)));
7519 if (reg != target)
7520 emit_move_insn (target, reg);
7522 return target;
7525 /* Expand a string compare operation using a sequence of char comparison
7526 to get rid of the calling overhead, with result going to TARGET if
7527 that's convenient.
7529 VAR_STR is the variable string source;
7530 CONST_STR is the constant string source;
7531 LENGTH is the number of chars to compare;
7532 CONST_STR_N indicates which source string is the constant string;
7533 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7535 The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7537 target = (int) (unsigned char) var_str[0]
7538 - (int) (unsigned char) const_str[0];
7539 if (target != 0)
7540 goto ne_label;
7542 target = (int) (unsigned char) var_str[length - 2]
7543 - (int) (unsigned char) const_str[length - 2];
7544 if (target != 0)
7545 goto ne_label;
7546 target = (int) (unsigned char) var_str[length - 1]
7547 - (int) (unsigned char) const_str[length - 1];
7548 ne_label:
7551 static rtx
7552 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7553 unsigned HOST_WIDE_INT length,
7554 int const_str_n, machine_mode mode)
7556 HOST_WIDE_INT offset = 0;
7557 rtx var_rtx_array
7558 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7559 rtx var_rtx = NULL_RTX;
7560 rtx const_rtx = NULL_RTX;
7561 rtx result = target ? target : gen_reg_rtx (mode);
7562 rtx_code_label *ne_label = gen_label_rtx ();
7563 tree unit_type_node = unsigned_char_type_node;
7564 scalar_int_mode unit_mode
7565 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7567 start_sequence ();
7569 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7571 var_rtx
7572 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7573 const_rtx = c_readstr (const_str + offset, unit_mode);
7574 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7575 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7577 op0 = convert_modes (mode, unit_mode, op0, 1);
7578 op1 = convert_modes (mode, unit_mode, op1, 1);
7579 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7580 result, 1, OPTAB_WIDEN);
7582 /* Force the difference into result register. We cannot reassign
7583 result here ("result = diff") or we may end up returning
7584 uninitialized result when expand_simple_binop allocates a new
7585 pseudo-register for returning. */
7586 if (diff != result)
7587 emit_move_insn (result, diff);
7589 if (i < length - 1)
7590 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7591 mode, true, ne_label);
7592 offset += GET_MODE_SIZE (unit_mode);
7595 emit_label (ne_label);
7596 rtx_insn *insns = get_insns ();
7597 end_sequence ();
7598 emit_insn (insns);
7600 return result;
7603 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7604 to TARGET if that's convenient.
7605 If the call cannot be inlined, return NULL_RTX. */
7607 static rtx
7608 inline_expand_builtin_bytecmp (tree exp, rtx target)
7610 tree fndecl = get_callee_fndecl (exp);
7611 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7612 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7614 /* Do NOT apply this inlining expansion when optimizing for size or
7615 optimization level below 2 or if unused *cmp hasn't been DCEd. */
7616 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7617 return NULL_RTX;
7619 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7620 || fcode == BUILT_IN_STRNCMP
7621 || fcode == BUILT_IN_MEMCMP);
7623 /* On a target where the type of the call (int) has the same or narrower precision
7624 than unsigned char, give up the inlining expansion. */
7625 if (TYPE_PRECISION (unsigned_char_type_node)
7626 >= TYPE_PRECISION (TREE_TYPE (exp)))
7627 return NULL_RTX;
7629 tree arg1 = CALL_EXPR_ARG (exp, 0);
7630 tree arg2 = CALL_EXPR_ARG (exp, 1);
7631 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7633 unsigned HOST_WIDE_INT len1 = 0;
7634 unsigned HOST_WIDE_INT len2 = 0;
7635 unsigned HOST_WIDE_INT len3 = 0;
7637 /* Get the object representation of the initializers of ARG1 and ARG2
7638 as strings, provided they refer to constant objects, with their byte
7639 sizes in LEN1 and LEN2, respectively. */
7640 const char *bytes1 = getbyterep (arg1, &len1);
7641 const char *bytes2 = getbyterep (arg2, &len2);
7643 /* Fail if neither argument refers to an initialized constant. */
7644 if (!bytes1 && !bytes2)
7645 return NULL_RTX;
7647 if (is_ncmp)
7649 /* Fail if the memcmp/strncmp bound is not a constant. */
7650 if (!tree_fits_uhwi_p (len3_tree))
7651 return NULL_RTX;
7653 len3 = tree_to_uhwi (len3_tree);
7655 if (fcode == BUILT_IN_MEMCMP)
7657 /* Fail if the memcmp bound is greater than the size of either
7658 of the two constant objects. */
7659 if ((bytes1 && len1 < len3)
7660 || (bytes2 && len2 < len3))
7661 return NULL_RTX;
7665 if (fcode != BUILT_IN_MEMCMP)
7667 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7668 and LEN2 to the length of the nul-terminated string stored
7669 in each. */
7670 if (bytes1 != NULL)
7671 len1 = strnlen (bytes1, len1) + 1;
7672 if (bytes2 != NULL)
7673 len2 = strnlen (bytes2, len2) + 1;
7676 /* See inline_string_cmp. */
7677 int const_str_n;
7678 if (!len1)
7679 const_str_n = 2;
7680 else if (!len2)
7681 const_str_n = 1;
7682 else if (len2 > len1)
7683 const_str_n = 1;
7684 else
7685 const_str_n = 2;
7687 /* For strncmp only, compute the new bound as the smallest of
7688 the lengths of the two strings (plus 1) and the bound provided
7689 to the function. */
7690 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7691 if (is_ncmp && len3 < bound)
7692 bound = len3;
7694 /* If the bound of the comparison is larger than the threshold,
7695 do nothing. */
7696 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7697 return NULL_RTX;
7699 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7701 /* Now, start inline expansion of the call. */
7702 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7703 (const_str_n == 1) ? bytes1 : bytes2, bound,
7704 const_str_n, mode);
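/* Illustrative sketch, not part of this file: a worked instance of the bound
   computation above.  For the call below the constant string gives
   strnlen ("hi", 3) + 1 == 3, the strncmp bound is 4, so the comparison bound
   is MIN (3, 4) == 3; assuming the default value 3 of
   --param builtin-string-cmp-inline-length, the call is inlined into the byte
   comparison sequence built by inline_string_cmp (at -O2 and above, when not
   optimizing for size).  The helper name is hypothetical.  */

static int
example_strncmp_inline (const char *s)
{
  return __builtin_strncmp (s, "hi", 4);
}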
7707 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7708 represents the size of the first argument to that call, or VOIDmode
7709 if the argument is a pointer. IGNORE will be true if the result
7710 isn't used. */
7711 static rtx
7712 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7713 bool ignore)
7715 rtx val, failsafe;
7716 unsigned nargs = call_expr_nargs (exp);
7718 tree arg0 = CALL_EXPR_ARG (exp, 0);
7720 if (mode == VOIDmode)
7722 mode = TYPE_MODE (TREE_TYPE (arg0));
7723 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7726 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7728 /* An optional second argument can be used as a failsafe value on
7729 some machines. If it isn't present, then the failsafe value is
7730 assumed to be 0. */
7731 if (nargs > 1)
7733 tree arg1 = CALL_EXPR_ARG (exp, 1);
7734 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7736 else
7737 failsafe = const0_rtx;
7739 /* If the result isn't used, the behavior is undefined. It would be
7740 nice to emit a warning here, but path splitting means this might
7741 happen with legitimate code. So simply drop the builtin
7742 expansion in that case; we've handled any side-effects above. */
7743 if (ignore)
7744 return const0_rtx;
7746 /* If we don't have a suitable target, create one to hold the result. */
7747 if (target == NULL || GET_MODE (target) != mode)
7748 target = gen_reg_rtx (mode);
7750 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7751 val = convert_modes (mode, VOIDmode, val, false);
7753 return targetm.speculation_safe_value (mode, target, val, failsafe);
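/* Illustrative sketch, not part of this file: the usual use of the builtin
   expanded above, sanitizing a bounds-checked index so that a mis-speculated
   path cannot leak the contents of out-of-bounds memory.  The helper and
   parameter names are hypothetical.  */

static int
example_speculation_safe_read (const int *table, unsigned long n,
			       unsigned long i)
{
  if (i < n)
    {
      i = __builtin_speculation_safe_value (i, 0);
      return table[i];
    }
  return 0;
}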
7756 /* Expand an expression EXP that calls a built-in function,
7757 with result going to TARGET if that's convenient
7758 (and in mode MODE if that's convenient).
7759 SUBTARGET may be used as the target for computing one of EXP's operands.
7760 IGNORE is nonzero if the value is to be ignored. */
7763 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7764 int ignore)
7766 tree fndecl = get_callee_fndecl (exp);
7767 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7768 int flags;
7770 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7771 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7773 /* When ASan is enabled, we don't want to expand some memory/string
7774 builtins and rely on libsanitizer's hooks. This allows us to avoid
7775 redundant checks and be sure that possible overflow will be detected
7776 by ASan. */
7778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7779 if (param_asan_kernel_mem_intrinsic_prefix
7780 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7781 | SANITIZE_KERNEL_HWADDRESS))
7782 switch (fcode)
7784 rtx save_decl_rtl, ret;
7785 case BUILT_IN_MEMCPY:
7786 case BUILT_IN_MEMMOVE:
7787 case BUILT_IN_MEMSET:
7788 save_decl_rtl = DECL_RTL (fndecl);
7789 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7790 ret = expand_call (exp, target, ignore);
7791 DECL_RTL (fndecl) = save_decl_rtl;
7792 return ret;
7793 default:
7794 break;
7796 if (sanitize_flags_p (SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7797 return expand_call (exp, target, ignore);
7799 /* When not optimizing, generate calls to library functions for a certain
7800 set of builtins. */
7801 if (!optimize
7802 && !called_as_built_in (fndecl)
7803 && fcode != BUILT_IN_FORK
7804 && fcode != BUILT_IN_EXECL
7805 && fcode != BUILT_IN_EXECV
7806 && fcode != BUILT_IN_EXECLP
7807 && fcode != BUILT_IN_EXECLE
7808 && fcode != BUILT_IN_EXECVP
7809 && fcode != BUILT_IN_EXECVE
7810 && fcode != BUILT_IN_CLEAR_CACHE
7811 && !ALLOCA_FUNCTION_CODE_P (fcode)
7812 && fcode != BUILT_IN_FREE
7813 && (fcode != BUILT_IN_MEMSET
7814 || !(flag_inline_stringops & ILSOP_MEMSET))
7815 && (fcode != BUILT_IN_MEMCPY
7816 || !(flag_inline_stringops & ILSOP_MEMCPY))
7817 && (fcode != BUILT_IN_MEMMOVE
7818 || !(flag_inline_stringops & ILSOP_MEMMOVE))
7819 && (fcode != BUILT_IN_MEMCMP
7820 || !(flag_inline_stringops & ILSOP_MEMCMP)))
7821 return expand_call (exp, target, ignore);
7823 /* The built-in function expanders test for target == const0_rtx
7824 to determine whether the function's result will be ignored. */
7825 if (ignore)
7826 target = const0_rtx;
7828 /* If the result of a pure or const built-in function is ignored, and
7829 none of its arguments are volatile, we can avoid expanding the
7830 built-in call and just evaluate the arguments for side-effects. */
7831 if (target == const0_rtx
7832 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7833 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7835 bool volatilep = false;
7836 tree arg;
7837 call_expr_arg_iterator iter;
7839 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7840 if (TREE_THIS_VOLATILE (arg))
7842 volatilep = true;
7843 break;
7846 if (! volatilep)
7848 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7849 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7850 return const0_rtx;
7854 switch (fcode)
7856 CASE_FLT_FN (BUILT_IN_FABS):
7857 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7858 case BUILT_IN_FABSD32:
7859 case BUILT_IN_FABSD64:
7860 case BUILT_IN_FABSD128:
7861 target = expand_builtin_fabs (exp, target, subtarget);
7862 if (target)
7863 return target;
7864 break;
7866 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7867 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7868 target = expand_builtin_copysign (exp, target, subtarget);
7869 if (target)
7870 return target;
7871 break;
7873 /* Just do a normal library call if we were unable to fold
7874 the values. */
7875 CASE_FLT_FN (BUILT_IN_CABS):
7876 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7877 break;
7879 CASE_FLT_FN (BUILT_IN_FMA):
7880 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7881 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7882 if (target)
7883 return target;
7884 break;
7886 CASE_FLT_FN (BUILT_IN_ILOGB):
7887 if (! flag_unsafe_math_optimizations)
7888 break;
7889 gcc_fallthrough ();
7890 CASE_FLT_FN (BUILT_IN_ISINF):
7891 CASE_FLT_FN (BUILT_IN_FINITE):
7892 case BUILT_IN_ISFINITE:
7893 case BUILT_IN_ISNORMAL:
7894 target = expand_builtin_interclass_mathfn (exp, target);
7895 if (target)
7896 return target;
7897 break;
7899 case BUILT_IN_ISSIGNALING:
7900 target = expand_builtin_issignaling (exp, target);
7901 if (target)
7902 return target;
7903 break;
7905 CASE_FLT_FN (BUILT_IN_ICEIL):
7906 CASE_FLT_FN (BUILT_IN_LCEIL):
7907 CASE_FLT_FN (BUILT_IN_LLCEIL):
7908 CASE_FLT_FN (BUILT_IN_LFLOOR):
7909 CASE_FLT_FN (BUILT_IN_IFLOOR):
7910 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7911 target = expand_builtin_int_roundingfn (exp, target);
7912 if (target)
7913 return target;
7914 break;
7916 CASE_FLT_FN (BUILT_IN_IRINT):
7917 CASE_FLT_FN (BUILT_IN_LRINT):
7918 CASE_FLT_FN (BUILT_IN_LLRINT):
7919 CASE_FLT_FN (BUILT_IN_IROUND):
7920 CASE_FLT_FN (BUILT_IN_LROUND):
7921 CASE_FLT_FN (BUILT_IN_LLROUND):
7922 target = expand_builtin_int_roundingfn_2 (exp, target);
7923 if (target)
7924 return target;
7925 break;
7927 CASE_FLT_FN (BUILT_IN_POWI):
7928 target = expand_builtin_powi (exp, target);
7929 if (target)
7930 return target;
7931 break;
7933 CASE_FLT_FN (BUILT_IN_CEXPI):
7934 target = expand_builtin_cexpi (exp, target);
7935 gcc_assert (target);
7936 return target;
7938 CASE_FLT_FN (BUILT_IN_SIN):
7939 CASE_FLT_FN (BUILT_IN_COS):
7940 if (! flag_unsafe_math_optimizations)
7941 break;
7942 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7943 if (target)
7944 return target;
7945 break;
7947 CASE_FLT_FN (BUILT_IN_SINCOS):
7948 if (! flag_unsafe_math_optimizations)
7949 break;
7950 target = expand_builtin_sincos (exp);
7951 if (target)
7952 return target;
7953 break;
7955 case BUILT_IN_FEGETROUND:
7956 target = expand_builtin_fegetround (exp, target, target_mode);
7957 if (target)
7958 return target;
7959 break;
7961 case BUILT_IN_FECLEAREXCEPT:
7962 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7963 feclearexcept_optab);
7964 if (target)
7965 return target;
7966 break;
7968 case BUILT_IN_FERAISEEXCEPT:
7969 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7970 feraiseexcept_optab);
7971 if (target)
7972 return target;
7973 break;
7975 case BUILT_IN_APPLY_ARGS:
7976 return expand_builtin_apply_args ();
7978 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7979 FUNCTION with a copy of the parameters described by
7980 ARGUMENTS, and ARGSIZE. It returns a block of memory
7981 allocated on the stack into which is stored all the registers
7982 that might possibly be used for returning the result of a
7983 function. ARGUMENTS is the value returned by
7984 __builtin_apply_args. ARGSIZE is the number of bytes of
7985 arguments that must be copied. ??? How should this value be
7986 computed? We'll also need a safe worst case value for varargs
7987 functions. */
7988 case BUILT_IN_APPLY:
7989 if (!validate_arglist (exp, POINTER_TYPE,
7990 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7991 && !validate_arglist (exp, REFERENCE_TYPE,
7992 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7993 return const0_rtx;
7994 else
7996 rtx ops[3];
7998 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7999 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8000 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8002 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8005 /* __builtin_return (RESULT) causes the function to return the
8006 value described by RESULT. RESULT is address of the block of
8007 memory returned by __builtin_apply. */
8008 case BUILT_IN_RETURN:
8009 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8010 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8011 return const0_rtx;
8013 case BUILT_IN_SAVEREGS:
8014 return expand_builtin_saveregs ();
8016 case BUILT_IN_VA_ARG_PACK:
8017 /* All valid uses of __builtin_va_arg_pack () are removed during
8018 inlining. */
8019 error ("invalid use of %<__builtin_va_arg_pack ()%>");
8020 return const0_rtx;
8022 case BUILT_IN_VA_ARG_PACK_LEN:
8023 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8024 inlining. */
8025 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8026 return const0_rtx;
8028 /* Return the address of the first anonymous stack arg. */
8029 case BUILT_IN_NEXT_ARG:
8030 if (fold_builtin_next_arg (exp, false))
8031 return const0_rtx;
8032 return expand_builtin_next_arg ();
8034 case BUILT_IN_CLEAR_CACHE:
8035 expand_builtin___clear_cache (exp);
8036 return const0_rtx;
8038 case BUILT_IN_CLASSIFY_TYPE:
8039 return expand_builtin_classify_type (exp);
8041 case BUILT_IN_CONSTANT_P:
8042 return const0_rtx;
8044 case BUILT_IN_FRAME_ADDRESS:
8045 case BUILT_IN_RETURN_ADDRESS:
8046 return expand_builtin_frame_address (fndecl, exp);
8048 case BUILT_IN_STACK_ADDRESS:
8049 return expand_builtin_stack_address ();
8051 case BUILT_IN___STRUB_ENTER:
8052 target = expand_builtin_strub_enter (exp);
8053 if (target)
8054 return target;
8055 break;
8057 case BUILT_IN___STRUB_UPDATE:
8058 target = expand_builtin_strub_update (exp);
8059 if (target)
8060 return target;
8061 break;
8063 case BUILT_IN___STRUB_LEAVE:
8064 target = expand_builtin_strub_leave (exp);
8065 if (target)
8066 return target;
8067 break;
8069 /* Returns the address of the area where the structure is returned.
8070 0 otherwise. */
8071 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8072 if (call_expr_nargs (exp) != 0
8073 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8074 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8075 return const0_rtx;
8076 else
8077 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8079 CASE_BUILT_IN_ALLOCA:
8080 target = expand_builtin_alloca (exp);
8081 if (target)
8082 return target;
8083 break;
8085 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8086 return expand_asan_emit_allocas_unpoison (exp);
8088 case BUILT_IN_STACK_SAVE:
8089 return expand_stack_save ();
8091 case BUILT_IN_STACK_RESTORE:
8092 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8093 return const0_rtx;
8095 case BUILT_IN_BSWAP16:
8096 case BUILT_IN_BSWAP32:
8097 case BUILT_IN_BSWAP64:
8098 case BUILT_IN_BSWAP128:
8099 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8100 if (target)
8101 return target;
8102 break;
8104 CASE_INT_FN (BUILT_IN_FFS):
8105 target = expand_builtin_unop (target_mode, exp, target,
8106 subtarget, ffs_optab);
8107 if (target)
8108 return target;
8109 break;
8111 CASE_INT_FN (BUILT_IN_CLZ):
8112 target = expand_builtin_unop (target_mode, exp, target,
8113 subtarget, clz_optab);
8114 if (target)
8115 return target;
8116 break;
8118 CASE_INT_FN (BUILT_IN_CTZ):
8119 target = expand_builtin_unop (target_mode, exp, target,
8120 subtarget, ctz_optab);
8121 if (target)
8122 return target;
8123 break;
8125 CASE_INT_FN (BUILT_IN_CLRSB):
8126 target = expand_builtin_unop (target_mode, exp, target,
8127 subtarget, clrsb_optab);
8128 if (target)
8129 return target;
8130 break;
8132 CASE_INT_FN (BUILT_IN_POPCOUNT):
8133 target = expand_builtin_unop (target_mode, exp, target,
8134 subtarget, popcount_optab);
8135 if (target)
8136 return target;
8137 break;
8139 CASE_INT_FN (BUILT_IN_PARITY):
8140 target = expand_builtin_unop (target_mode, exp, target,
8141 subtarget, parity_optab);
8142 if (target)
8143 return target;
8144 break;
8146 case BUILT_IN_STRLEN:
8147 target = expand_builtin_strlen (exp, target, target_mode);
8148 if (target)
8149 return target;
8150 break;
8152 case BUILT_IN_STRNLEN:
8153 target = expand_builtin_strnlen (exp, target, target_mode);
8154 if (target)
8155 return target;
8156 break;
8158 case BUILT_IN_STRCPY:
8159 target = expand_builtin_strcpy (exp, target);
8160 if (target)
8161 return target;
8162 break;
8164 case BUILT_IN_STRNCPY:
8165 target = expand_builtin_strncpy (exp, target);
8166 if (target)
8167 return target;
8168 break;
8170 case BUILT_IN_STPCPY:
8171 target = expand_builtin_stpcpy (exp, target, mode);
8172 if (target)
8173 return target;
8174 break;
8176 case BUILT_IN_MEMCPY:
8177 target = expand_builtin_memcpy (exp, target);
8178 if (target)
8179 return target;
8180 break;
8182 case BUILT_IN_MEMMOVE:
8183 target = expand_builtin_memmove (exp, target);
8184 if (target)
8185 return target;
8186 break;
8188 case BUILT_IN_MEMPCPY:
8189 target = expand_builtin_mempcpy (exp, target);
8190 if (target)
8191 return target;
8192 break;
8194 case BUILT_IN_MEMSET:
8195 target = expand_builtin_memset (exp, target, mode);
8196 if (target)
8197 return target;
8198 break;
8200 case BUILT_IN_BZERO:
8201 target = expand_builtin_bzero (exp);
8202 if (target)
8203 return target;
8204 break;
8206 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8207 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8208 when changing it to a strcmp call. */
8209 case BUILT_IN_STRCMP_EQ:
8210 target = expand_builtin_memcmp (exp, target, true);
8211 if (target)
8212 return target;
8214 /* Change this call back to a BUILT_IN_STRCMP. */
8215 TREE_OPERAND (exp, 1)
8216 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8218 /* Delete the last parameter. */
8219 unsigned int i;
8220 vec<tree, va_gc> *arg_vec;
8221 vec_alloc (arg_vec, 2);
8222 for (i = 0; i < 2; i++)
8223 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8224 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8225 /* FALLTHROUGH */
8227 case BUILT_IN_STRCMP:
8228 target = expand_builtin_strcmp (exp, target);
8229 if (target)
8230 return target;
8231 break;
8233 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8234 back to a BUILT_IN_STRNCMP. */
8235 case BUILT_IN_STRNCMP_EQ:
8236 target = expand_builtin_memcmp (exp, target, true);
8237 if (target)
8238 return target;
8240 /* Change it back to a BUILT_IN_STRNCMP. */
8241 TREE_OPERAND (exp, 1)
8242 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8243 /* FALLTHROUGH */
8245 case BUILT_IN_STRNCMP:
8246 target = expand_builtin_strncmp (exp, target, mode);
8247 if (target)
8248 return target;
8249 break;
8251 case BUILT_IN_BCMP:
8252 case BUILT_IN_MEMCMP:
8253 case BUILT_IN_MEMCMP_EQ:
8254 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8255 if (target)
8256 return target;
8257 if (fcode == BUILT_IN_MEMCMP_EQ)
8259 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8260 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8262 break;
8264 case BUILT_IN_SETJMP:
8265 /* This should have been lowered to the builtins below. */
8266 gcc_unreachable ();
8268 case BUILT_IN_SETJMP_SETUP:
8269 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8270 and the receiver label. */
8271 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8273 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8274 VOIDmode, EXPAND_NORMAL);
8275 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8276 rtx_insn *label_r = label_rtx (label);
8278 expand_builtin_setjmp_setup (buf_addr, label_r);
8279 return const0_rtx;
8281 break;
8283 case BUILT_IN_SETJMP_RECEIVER:
8284 /* __builtin_setjmp_receiver is passed the receiver label. */
8285 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8287 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8288 rtx_insn *label_r = label_rtx (label);
8290 expand_builtin_setjmp_receiver (label_r);
8291 nonlocal_goto_handler_labels
8292 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8293 nonlocal_goto_handler_labels);
8294 /* ??? Do not let expand_label treat us as such since we would
8295 not want to be both on the list of non-local labels and on
8296 the list of forced labels. */
8297 FORCED_LABEL (label) = 0;
8298 return const0_rtx;
8300 break;
8302 /* __builtin_longjmp is passed a pointer to an array of five words.
8303 It's similar to the C library longjmp function but works with
8304 __builtin_setjmp above. */
8305 case BUILT_IN_LONGJMP:
8306 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8308 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8309 VOIDmode, EXPAND_NORMAL);
8310 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8312 if (value != const1_rtx)
8314 error ("%<__builtin_longjmp%> second argument must be 1");
8315 return const0_rtx;
8318 expand_builtin_longjmp (buf_addr, value);
8319 return const0_rtx;
8321 break;
8323 case BUILT_IN_NONLOCAL_GOTO:
8324 target = expand_builtin_nonlocal_goto (exp);
8325 if (target)
8326 return target;
8327 break;
8329 /* This updates the setjmp buffer that is its argument with the value
8330 of the current stack pointer. */
8331 case BUILT_IN_UPDATE_SETJMP_BUF:
8332 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8334 rtx buf_addr
8335 = expand_normal (CALL_EXPR_ARG (exp, 0));
8337 expand_builtin_update_setjmp_buf (buf_addr);
8338 return const0_rtx;
8340 break;
8342 case BUILT_IN_TRAP:
8343 case BUILT_IN_UNREACHABLE_TRAP:
8344 expand_builtin_trap ();
8345 return const0_rtx;
8347 case BUILT_IN_UNREACHABLE:
8348 expand_builtin_unreachable ();
8349 return const0_rtx;
8351 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8352 case BUILT_IN_SIGNBITD32:
8353 case BUILT_IN_SIGNBITD64:
8354 case BUILT_IN_SIGNBITD128:
8355 target = expand_builtin_signbit (exp, target);
8356 if (target)
8357 return target;
8358 break;
8360 /* Various hooks for the DWARF 2 __throw routine. */
8361 case BUILT_IN_UNWIND_INIT:
8362 expand_builtin_unwind_init ();
8363 return const0_rtx;
8364 case BUILT_IN_DWARF_CFA:
8365 return virtual_cfa_rtx;
8366 #ifdef DWARF2_UNWIND_INFO
8367 case BUILT_IN_DWARF_SP_COLUMN:
8368 return expand_builtin_dwarf_sp_column ();
8369 case BUILT_IN_INIT_DWARF_REG_SIZES:
8370 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8371 return const0_rtx;
8372 #endif
8373 case BUILT_IN_FROB_RETURN_ADDR:
8374 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8375 case BUILT_IN_EXTRACT_RETURN_ADDR:
8376 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8377 case BUILT_IN_EH_RETURN:
8378 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8379 CALL_EXPR_ARG (exp, 1));
8380 return const0_rtx;
8381 case BUILT_IN_EH_RETURN_DATA_REGNO:
8382 return expand_builtin_eh_return_data_regno (exp);
8383 case BUILT_IN_EXTEND_POINTER:
8384 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8385 case BUILT_IN_EH_POINTER:
8386 return expand_builtin_eh_pointer (exp);
8387 case BUILT_IN_EH_FILTER:
8388 return expand_builtin_eh_filter (exp);
8389 case BUILT_IN_EH_COPY_VALUES:
8390 return expand_builtin_eh_copy_values (exp);
8392 case BUILT_IN_VA_START:
8393 return expand_builtin_va_start (exp);
8394 case BUILT_IN_VA_END:
8395 return expand_builtin_va_end (exp);
8396 case BUILT_IN_VA_COPY:
8397 return expand_builtin_va_copy (exp);
8398 case BUILT_IN_EXPECT:
8399 return expand_builtin_expect (exp, target);
8400 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8401 return expand_builtin_expect_with_probability (exp, target);
8402 case BUILT_IN_ASSUME_ALIGNED:
8403 return expand_builtin_assume_aligned (exp, target);
8404 case BUILT_IN_PREFETCH:
8405 expand_builtin_prefetch (exp);
8406 return const0_rtx;
8408 case BUILT_IN_INIT_TRAMPOLINE:
8409 return expand_builtin_init_trampoline (exp, true);
8410 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8411 return expand_builtin_init_trampoline (exp, false);
8412 case BUILT_IN_ADJUST_TRAMPOLINE:
8413 return expand_builtin_adjust_trampoline (exp);
8415 case BUILT_IN_INIT_DESCRIPTOR:
8416 return expand_builtin_init_descriptor (exp);
8417 case BUILT_IN_ADJUST_DESCRIPTOR:
8418 return expand_builtin_adjust_descriptor (exp);
8420 case BUILT_IN_FORK:
8421 case BUILT_IN_EXECL:
8422 case BUILT_IN_EXECV:
8423 case BUILT_IN_EXECLP:
8424 case BUILT_IN_EXECLE:
8425 case BUILT_IN_EXECVP:
8426 case BUILT_IN_EXECVE:
8427 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8428 if (target)
8429 return target;
8430 break;
8432 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8433 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8434 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8435 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8436 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8438 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8439 if (target)
8440 return target;
8441 break;
8443 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8444 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8445 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8446 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8447 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8449 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8450 if (target)
8451 return target;
8452 break;
8454 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8455 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8456 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8457 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8458 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8460 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8461 if (target)
8462 return target;
8463 break;
8465 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8466 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8467 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8468 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8469 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8471 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8472 if (target)
8473 return target;
8474 break;
8476 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8477 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8478 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8479 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8480 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8482 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8483 if (target)
8484 return target;
8485 break;
8487 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8488 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8489 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8490 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8491 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8493 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8494 if (target)
8495 return target;
8496 break;
8498 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8499 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8500 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8501 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8502 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8504 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8505 if (target)
8506 return target;
8507 break;
8509 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8510 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8511 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8512 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8513 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8515 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8516 if (target)
8517 return target;
8518 break;
8520 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8521 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8522 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8523 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8524 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8526 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8527 if (target)
8528 return target;
8529 break;
8531 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8532 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8533 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8534 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8535 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8537 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8538 if (target)
8539 return target;
8540 break;
8542 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8543 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8544 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8545 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8546 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8548 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8549 if (target)
8550 return target;
8551 break;
8553 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8554 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8555 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8556 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8557 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8558 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8559 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8560 if (target)
8561 return target;
8562 break;
8564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8567 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8568 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8569 if (mode == VOIDmode)
8570 mode = TYPE_MODE (boolean_type_node);
8571 if (!target || !register_operand (target, mode))
8572 target = gen_reg_rtx (mode);
8574 mode = get_builtin_sync_mode
8575 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8576 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8577 if (target)
8578 return target;
8579 break;
8581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8584 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8585 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8586 mode = get_builtin_sync_mode
8587 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8588 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8589 if (target)
8590 return target;
8591 break;
8593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8596 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8597 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8599 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8600 if (target)
8601 return target;
8602 break;
8604 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8605 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8606 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8607 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8608 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8610 expand_builtin_sync_lock_release (mode, exp);
8611 return const0_rtx;
8613 case BUILT_IN_SYNC_SYNCHRONIZE:
8614 expand_builtin_sync_synchronize ();
8615 return const0_rtx;
8617 case BUILT_IN_ATOMIC_EXCHANGE_1:
8618 case BUILT_IN_ATOMIC_EXCHANGE_2:
8619 case BUILT_IN_ATOMIC_EXCHANGE_4:
8620 case BUILT_IN_ATOMIC_EXCHANGE_8:
8621 case BUILT_IN_ATOMIC_EXCHANGE_16:
8622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8623 target = expand_builtin_atomic_exchange (mode, exp, target);
8624 if (target)
8625 return target;
8626 break;
8628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8632 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8634 unsigned int nargs, z;
8635 vec<tree, va_gc> *vec;
8637 mode =
8638 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8639 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8640 if (target)
8641 return target;
8643 /* If this is turned into an external library call, the weak parameter
8644 must be dropped to match the expected parameter list. */
8645 nargs = call_expr_nargs (exp);
8646 vec_alloc (vec, nargs - 1);
8647 for (z = 0; z < 3; z++)
8648 vec->quick_push (CALL_EXPR_ARG (exp, z));
8649 /* Skip the boolean weak parameter. */
8650 for (z = 4; z < 6; z++)
8651 vec->quick_push (CALL_EXPR_ARG (exp, z));
8652 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8653 break;
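      /* Illustrative sketch, not part of this file: the builtin form,
	   ok = __atomic_compare_exchange_n (p, &expected, desired, 1,
					     __ATOMIC_ACQ_REL,
					     __ATOMIC_ACQUIRE);
	 carries the boolean weak argument (the literal 1 above), whereas the
	 out-of-line libatomic routine has no weak variant, which is why the
	 call is rebuilt here with only the pointer, expected, desired and the
	 two memory-model arguments before being emitted as a library call.  */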
8656 case BUILT_IN_ATOMIC_LOAD_1:
8657 case BUILT_IN_ATOMIC_LOAD_2:
8658 case BUILT_IN_ATOMIC_LOAD_4:
8659 case BUILT_IN_ATOMIC_LOAD_8:
8660 case BUILT_IN_ATOMIC_LOAD_16:
8661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8662 target = expand_builtin_atomic_load (mode, exp, target);
8663 if (target)
8664 return target;
8665 break;
8667 case BUILT_IN_ATOMIC_STORE_1:
8668 case BUILT_IN_ATOMIC_STORE_2:
8669 case BUILT_IN_ATOMIC_STORE_4:
8670 case BUILT_IN_ATOMIC_STORE_8:
8671 case BUILT_IN_ATOMIC_STORE_16:
8672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8673 target = expand_builtin_atomic_store (mode, exp);
8674 if (target)
8675 return const0_rtx;
8676 break;
8678 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8679 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8680 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8681 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8682 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8684 enum built_in_function lib;
8685 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8686 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8687 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8688 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8689 ignore, lib);
8690 if (target)
8691 return target;
8692 break;
8694 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8695 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8696 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8697 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8698 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8700 enum built_in_function lib;
8701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8702 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8703 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8704 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8705 ignore, lib);
8706 if (target)
8707 return target;
8708 break;
8710 case BUILT_IN_ATOMIC_AND_FETCH_1:
8711 case BUILT_IN_ATOMIC_AND_FETCH_2:
8712 case BUILT_IN_ATOMIC_AND_FETCH_4:
8713 case BUILT_IN_ATOMIC_AND_FETCH_8:
8714 case BUILT_IN_ATOMIC_AND_FETCH_16:
8716 enum built_in_function lib;
8717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8718 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8719 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8720 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8721 ignore, lib);
8722 if (target)
8723 return target;
8724 break;
8726 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8727 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8728 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8729 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8730 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8732 enum built_in_function lib;
8733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8734 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8735 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8736 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8737 ignore, lib);
8738 if (target)
8739 return target;
8740 break;
8742 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8743 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8744 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8745 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8746 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8748 enum built_in_function lib;
8749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8750 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8751 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8752 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8753 ignore, lib);
8754 if (target)
8755 return target;
8756 break;
8758 case BUILT_IN_ATOMIC_OR_FETCH_1:
8759 case BUILT_IN_ATOMIC_OR_FETCH_2:
8760 case BUILT_IN_ATOMIC_OR_FETCH_4:
8761 case BUILT_IN_ATOMIC_OR_FETCH_8:
8762 case BUILT_IN_ATOMIC_OR_FETCH_16:
8764 enum built_in_function lib;
8765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8766 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8767 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8768 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8769 ignore, lib);
8770 if (target)
8771 return target;
8772 break;
8774 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8775 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8776 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8777 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8778 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8779 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8780 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8781 ignore, BUILT_IN_NONE);
8782 if (target)
8783 return target;
8784 break;
8786 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8787 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8788 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8789 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8790 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8791 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8792 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8793 ignore, BUILT_IN_NONE);
8794 if (target)
8795 return target;
8796 break;
8798 case BUILT_IN_ATOMIC_FETCH_AND_1:
8799 case BUILT_IN_ATOMIC_FETCH_AND_2:
8800 case BUILT_IN_ATOMIC_FETCH_AND_4:
8801 case BUILT_IN_ATOMIC_FETCH_AND_8:
8802 case BUILT_IN_ATOMIC_FETCH_AND_16:
8803 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8804 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8805 ignore, BUILT_IN_NONE);
8806 if (target)
8807 return target;
8808 break;
8810 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8811 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8812 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8813 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8814 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8815 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8816 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8817 ignore, BUILT_IN_NONE);
8818 if (target)
8819 return target;
8820 break;
8822 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8823 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8824 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8825 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8826 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8828 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8829 ignore, BUILT_IN_NONE);
8830 if (target)
8831 return target;
8832 break;
8834 case BUILT_IN_ATOMIC_FETCH_OR_1:
8835 case BUILT_IN_ATOMIC_FETCH_OR_2:
8836 case BUILT_IN_ATOMIC_FETCH_OR_4:
8837 case BUILT_IN_ATOMIC_FETCH_OR_8:
8838 case BUILT_IN_ATOMIC_FETCH_OR_16:
8839 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8840 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8841 ignore, BUILT_IN_NONE);
8842 if (target)
8843 return target;
8844 break;
8846 case BUILT_IN_ATOMIC_TEST_AND_SET:
8847 target = expand_builtin_atomic_test_and_set (exp, target);
8848 if (target)
8849 return target;
8850 break;
8852 case BUILT_IN_ATOMIC_CLEAR:
8853 return expand_builtin_atomic_clear (exp);
8855 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8856 return expand_builtin_atomic_always_lock_free (exp);
8858 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8859 target = expand_builtin_atomic_is_lock_free (exp);
8860 if (target)
8861 return target;
8862 break;
8864 case BUILT_IN_ATOMIC_THREAD_FENCE:
8865 expand_builtin_atomic_thread_fence (exp);
8866 return const0_rtx;
8868 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8869 expand_builtin_atomic_signal_fence (exp);
8870 return const0_rtx;
8872 case BUILT_IN_OBJECT_SIZE:
8873 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8874 return expand_builtin_object_size (exp);
8876 case BUILT_IN_MEMCPY_CHK:
8877 case BUILT_IN_MEMPCPY_CHK:
8878 case BUILT_IN_MEMMOVE_CHK:
8879 case BUILT_IN_MEMSET_CHK:
8880 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8881 if (target)
8882 return target;
8883 break;
8885 case BUILT_IN_STRCPY_CHK:
8886 case BUILT_IN_STPCPY_CHK:
8887 case BUILT_IN_STRNCPY_CHK:
8888 case BUILT_IN_STPNCPY_CHK:
8889 case BUILT_IN_STRCAT_CHK:
8890 case BUILT_IN_STRNCAT_CHK:
8891 case BUILT_IN_SNPRINTF_CHK:
8892 case BUILT_IN_VSNPRINTF_CHK:
8893 maybe_emit_chk_warning (exp, fcode);
8894 break;
8896 case BUILT_IN_SPRINTF_CHK:
8897 case BUILT_IN_VSPRINTF_CHK:
8898 maybe_emit_sprintf_chk_warning (exp, fcode);
8899 break;
8901 case BUILT_IN_THREAD_POINTER:
8902 return expand_builtin_thread_pointer (exp, target);
8904 case BUILT_IN_SET_THREAD_POINTER:
8905 expand_builtin_set_thread_pointer (exp);
8906 return const0_rtx;
8908 case BUILT_IN_ACC_ON_DEVICE:
8909 /* Do a library call if we failed to expand the builtin when
8910 folding. */
8911 break;
8913 case BUILT_IN_GOACC_PARLEVEL_ID:
8914 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8915 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8917 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8918 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8920 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8921 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8922 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8923 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8924 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8925 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8926 return expand_speculation_safe_value (mode, exp, target, ignore);
8928 default: /* just do library call, if unknown builtin */
8929 break;
8932 /* The switch statement above can drop through to cause the function
8933 to be called normally. */
8934 return expand_call (exp, target, ignore);
8937 /* Determine whether a tree node represents a call to a built-in
8938 function. If the tree T is a call to a built-in function with
8939 the right number of arguments of the appropriate types, return
8940 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8941 Otherwise the return value is END_BUILTINS. */
8943 enum built_in_function
8944 builtin_mathfn_code (const_tree t)
8946 const_tree fndecl, arg, parmlist;
8947 const_tree argtype, parmtype;
8948 const_call_expr_arg_iterator iter;
8950 if (TREE_CODE (t) != CALL_EXPR)
8951 return END_BUILTINS;
8953 fndecl = get_callee_fndecl (t);
8954 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8955 return END_BUILTINS;
8957 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8958 init_const_call_expr_arg_iterator (t, &iter);
8959 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8961 /* If a function doesn't take a variable number of arguments,
8962 the last element in the list will have type `void'. */
8963 parmtype = TREE_VALUE (parmlist);
8964 if (VOID_TYPE_P (parmtype))
8966 if (more_const_call_expr_args_p (&iter))
8967 return END_BUILTINS;
8968 return DECL_FUNCTION_CODE (fndecl);
8971 if (! more_const_call_expr_args_p (&iter))
8972 return END_BUILTINS;
8974 arg = next_const_call_expr_arg (&iter);
8975 argtype = TREE_TYPE (arg);
8977 if (SCALAR_FLOAT_TYPE_P (parmtype))
8979 if (! SCALAR_FLOAT_TYPE_P (argtype))
8980 return END_BUILTINS;
8982 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8984 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8985 return END_BUILTINS;
8987 else if (POINTER_TYPE_P (parmtype))
8989 if (! POINTER_TYPE_P (argtype))
8990 return END_BUILTINS;
8992 else if (INTEGRAL_TYPE_P (parmtype))
8994 if (! INTEGRAL_TYPE_P (argtype))
8995 return END_BUILTINS;
8997 else
8998 return END_BUILTINS;
9001 /* Variable-length argument list. */
9002 return DECL_FUNCTION_CODE (fndecl);
9005 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9006 evaluate to a constant. */
9008 static tree
9009 fold_builtin_constant_p (tree arg)
9011 /* We return 1 for a numeric type that's known to be a constant
9012 value at compile-time or for an aggregate type that's a
9013 literal constant. */
9014 STRIP_NOPS (arg);
9016 /* If we know this is a constant, return the constant one. */
9017 if (CONSTANT_CLASS_P (arg)
9018 || (TREE_CODE (arg) == CONSTRUCTOR
9019 && TREE_CONSTANT (arg)))
9020 return integer_one_node;
9021 if (TREE_CODE (arg) == ADDR_EXPR)
9023 tree op = TREE_OPERAND (arg, 0);
9024 if (TREE_CODE (op) == STRING_CST
9025 || (TREE_CODE (op) == ARRAY_REF
9026 && integer_zerop (TREE_OPERAND (op, 1))
9027 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9028 return integer_one_node;
9031 /* If this expression has side effects, show we don't know it to be a
9032 constant. Likewise if it's a pointer or aggregate type, since in
9033 those cases we only want literals, as those are only optimized
9034 when generating RTL, not later.
9035 And finally, if we are compiling an initializer, not code, we
9036 need to return a definite result now; there's not going to be any
9037 more optimization done. */
9038 if (TREE_SIDE_EFFECTS (arg)
9039 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9040 || POINTER_TYPE_P (TREE_TYPE (arg))
9041 || cfun == 0
9042 || folding_initializer
9043 || force_folding_builtin_constant_p)
9044 return integer_zero_node;
9046 return NULL_TREE;
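/* For example, __builtin_constant_p (4 * 1024) and
   __builtin_constant_p ("abc") fold to 1 here, while
   __builtin_constant_p (ptr) for a pointer variable folds to 0
   immediately, because only literals are accepted for pointer and
   aggregate types.  Everything else is deferred (NULL_TREE) so later
   optimizations can still prove the argument constant.  */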
9049 /* Create builtin_expect or builtin_expect_with_probability
9050 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9051 The Fortran FE can also produce builtin_expect with PREDICTOR as a third argument.
9052 builtin_expect_with_probability instead uses the third argument as a PROBABILITY
9053 value. */
9055 static tree
9056 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9057 tree predictor, tree probability)
9059 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9061 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9062 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9063 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9064 ret_type = TREE_TYPE (TREE_TYPE (fn));
9065 pred_type = TREE_VALUE (arg_types);
9066 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9068 pred = fold_convert_loc (loc, pred_type, pred);
9069 expected = fold_convert_loc (loc, expected_type, expected);
9071 if (probability)
9072 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9073 else
9074 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9075 predictor);
9077 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9078 build_int_cst (ret_type, 0));
9081 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9082 NULL_TREE if no simplification is possible. */
9084 tree
9085 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9086 tree arg3)
9088 tree inner, fndecl, inner_arg0;
9089 enum tree_code code;
9091 /* Distribute the expected value over short-circuiting operators.
9092 See through the cast from truthvalue_type_node to long. */
9093 inner_arg0 = arg0;
9094 while (CONVERT_EXPR_P (inner_arg0)
9095 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9096 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9097 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9099 /* If this is a builtin_expect within a builtin_expect, keep the
9100 inner one. See through a comparison against a constant. It
9101 might have been added to create a truthvalue. */
9102 inner = inner_arg0;
9104 if (COMPARISON_CLASS_P (inner)
9105 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9106 inner = TREE_OPERAND (inner, 0);
9108 if (TREE_CODE (inner) == CALL_EXPR
9109 && (fndecl = get_callee_fndecl (inner))
9110 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
9111 BUILT_IN_EXPECT_WITH_PROBABILITY))
9112 return arg0;
9114 inner = inner_arg0;
9115 code = TREE_CODE (inner);
9116 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9118 tree op0 = TREE_OPERAND (inner, 0);
9119 tree op1 = TREE_OPERAND (inner, 1);
9120 arg1 = save_expr (arg1);
9122 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9123 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9124 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9126 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9129 /* If the argument isn't invariant then there's nothing else we can do. */
9130 if (!TREE_CONSTANT (inner_arg0))
9131 return NULL_TREE;
9133 /* If we expect that a comparison against the argument will fold to
9134 a constant return the constant. In practice, this means a true
9135 constant or the address of a non-weak symbol. */
9136 inner = inner_arg0;
9137 STRIP_NOPS (inner);
9138 if (TREE_CODE (inner) == ADDR_EXPR)
9142 inner = TREE_OPERAND (inner, 0);
9144 while (TREE_CODE (inner) == COMPONENT_REF
9145 || TREE_CODE (inner) == ARRAY_REF);
9146 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9147 return NULL_TREE;
9150 /* Otherwise, ARG0 already has the proper type for the return value. */
9151 return arg0;
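/* A rough example of the distribution above:
     __builtin_expect (a && b, 1)
   is folded to the equivalent of
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   converted back to the type of the original argument, so each
   short-circuited operand carries the expectation.  */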
9154 /* Fold a call to __builtin_classify_type with argument ARG. */
9156 static tree
9157 fold_builtin_classify_type (tree arg)
9159 if (arg == 0)
9160 return build_int_cst (integer_type_node, no_type_class);
9162 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9165 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9166 ARG. */
9168 static tree
9169 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9171 if (!validate_arg (arg, POINTER_TYPE))
9172 return NULL_TREE;
9173 else
9175 c_strlen_data lendata = { };
9176 tree len = c_strlen (arg, 0, &lendata);
9178 if (len)
9179 return fold_convert_loc (loc, type, len);
9181 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
9182 also early enough to detect invalid reads in multidimensional
9183 arrays and struct members. */
9184 if (!lendata.decl)
9185 c_strlen (arg, 1, &lendata);
9187 if (lendata.decl)
9189 if (EXPR_HAS_LOCATION (arg))
9190 loc = EXPR_LOCATION (arg);
9191 else if (loc == UNKNOWN_LOCATION)
9192 loc = input_location;
9193 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9196 return NULL_TREE;
9200 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9202 static tree
9203 fold_builtin_inf (location_t loc, tree type, int warn)
9205 /* __builtin_inff is intended to be usable to define INFINITY on all
9206 targets. If an infinity is not available, INFINITY expands "to a
9207 positive constant of type float that overflows at translation
9208 time", footnote "In this case, using INFINITY will violate the
9209 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9210 Thus we pedwarn to ensure this constraint violation is
9211 diagnosed. */
9212 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9213 pedwarn (loc, 0, "target format does not support infinity");
9215 return build_real (type, dconstinf);
9218 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9219 NULL_TREE if no simplification can be made. */
9221 static tree
9222 fold_builtin_sincos (location_t loc,
9223 tree arg0, tree arg1, tree arg2)
9225 tree type;
9226 tree fndecl, call = NULL_TREE;
9228 if (!validate_arg (arg0, REAL_TYPE)
9229 || !validate_arg (arg1, POINTER_TYPE)
9230 || !validate_arg (arg2, POINTER_TYPE))
9231 return NULL_TREE;
9233 type = TREE_TYPE (arg0);
9235 /* Calculate the result when the argument is a constant. */
9236 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9237 if (fn == END_BUILTINS)
9238 return NULL_TREE;
9240 /* Canonicalize sincos to cexpi. */
9241 if (TREE_CODE (arg0) == REAL_CST)
9243 tree complex_type = build_complex_type (type);
9244 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9246 if (!call)
9248 if (!targetm.libc_has_function (function_c99_math_complex, type)
9249 || !builtin_decl_implicit_p (fn))
9250 return NULL_TREE;
9251 fndecl = builtin_decl_explicit (fn);
9252 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9253 call = builtin_save_expr (call);
9256 tree ptype = build_pointer_type (type);
9257 arg1 = fold_convert (ptype, arg1);
9258 arg2 = fold_convert (ptype, arg2);
9259 return build2 (COMPOUND_EXPR, void_type_node,
9260 build2 (MODIFY_EXPR, void_type_node,
9261 build_fold_indirect_ref_loc (loc, arg1),
9262 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9263 build2 (MODIFY_EXPR, void_type_node,
9264 build_fold_indirect_ref_loc (loc, arg2),
9265 fold_build1_loc (loc, REALPART_EXPR, type, call)));
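/* Illustration of the canonicalization above:
     sincos (x, &s, &c)
   becomes roughly
     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;
   and when X is a constant the cexpi call itself is folded to a
   complex constant.  */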
9268 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9269 Return NULL_TREE if no simplification can be made. */
9271 static tree
9272 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9274 if (!validate_arg (arg1, POINTER_TYPE)
9275 || !validate_arg (arg2, POINTER_TYPE)
9276 || !validate_arg (len, INTEGER_TYPE))
9277 return NULL_TREE;
9279 /* If the LEN parameter is zero, return zero. */
9280 if (integer_zerop (len))
9281 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9282 arg1, arg2);
9284 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9285 if (operand_equal_p (arg1, arg2, 0))
9286 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9288 /* If the LEN parameter is one, return an expression corresponding to
9289 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9290 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9292 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9293 tree cst_uchar_ptr_node
9294 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9296 tree ind1
9297 = fold_convert_loc (loc, integer_type_node,
9298 build1 (INDIRECT_REF, cst_uchar_node,
9299 fold_convert_loc (loc,
9300 cst_uchar_ptr_node,
9301 arg1)));
9302 tree ind2
9303 = fold_convert_loc (loc, integer_type_node,
9304 build1 (INDIRECT_REF, cst_uchar_node,
9305 fold_convert_loc (loc,
9306 cst_uchar_ptr_node,
9307 arg2)));
9308 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9311 return NULL_TREE;
9314 /* Fold a call to builtin isascii with argument ARG. */
9316 static tree
9317 fold_builtin_isascii (location_t loc, tree arg)
9319 if (!validate_arg (arg, INTEGER_TYPE))
9320 return NULL_TREE;
9321 else
9323 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9324 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9325 build_int_cst (integer_type_node,
9326 ~ (unsigned HOST_WIDE_INT) 0x7f));
9327 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9328 arg, integer_zero_node);
9332 /* Fold a call to builtin toascii with argument ARG. */
9334 static tree
9335 fold_builtin_toascii (location_t loc, tree arg)
9337 if (!validate_arg (arg, INTEGER_TYPE))
9338 return NULL_TREE;
9340 /* Transform toascii(c) -> (c & 0x7f). */
9341 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9342 build_int_cst (integer_type_node, 0x7f));
9345 /* Fold a call to builtin isdigit with argument ARG. */
9347 static tree
9348 fold_builtin_isdigit (location_t loc, tree arg)
9350 if (!validate_arg (arg, INTEGER_TYPE))
9351 return NULL_TREE;
9352 else
9354 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9355 /* According to the C standard, isdigit is unaffected by locale.
9356 However, it definitely is affected by the target character set. */
9357 unsigned HOST_WIDE_INT target_digit0
9358 = lang_hooks.to_target_charset ('0');
9360 if (target_digit0 == 0)
9361 return NULL_TREE;
9363 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9364 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9365 build_int_cst (unsigned_type_node, target_digit0));
9366 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9367 build_int_cst (unsigned_type_node, 9));
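/* The single unsigned comparison covers both ends of the range: e.g.
   for c == '0' - 1 the subtraction wraps to a large value, so
   (unsigned) c - '0' <= 9 is false, just as it is for c == '9' + 1
   where the difference is 10.  */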
9371 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9373 static tree
9374 fold_builtin_fabs (location_t loc, tree arg, tree type)
9376 if (!validate_arg (arg, REAL_TYPE))
9377 return NULL_TREE;
9379 arg = fold_convert_loc (loc, type, arg);
9380 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9383 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9385 static tree
9386 fold_builtin_abs (location_t loc, tree arg, tree type)
9388 if (!validate_arg (arg, INTEGER_TYPE))
9389 return NULL_TREE;
9391 arg = fold_convert_loc (loc, type, arg);
9392 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9395 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9397 static tree
9398 fold_builtin_carg (location_t loc, tree arg, tree type)
9400 if (validate_arg (arg, COMPLEX_TYPE)
9401 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9403 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9405 if (atan2_fn)
9407 tree new_arg = builtin_save_expr (arg);
9408 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9409 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9410 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9414 return NULL_TREE;
9417 /* Fold a call to builtin frexp, we can assume the base is 2. */
9419 static tree
9420 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9422 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9423 return NULL_TREE;
9425 STRIP_NOPS (arg0);
9427 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9428 return NULL_TREE;
9430 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9432 /* Proceed if a valid pointer type was passed in. */
9433 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9435 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9436 tree frac, exp, res;
9438 switch (value->cl)
9440 case rvc_zero:
9441 /* For +-0, return (*exp = 0, +-0). */
9442 exp = integer_zero_node;
9443 frac = arg0;
9444 break;
9445 case rvc_nan:
9446 case rvc_inf:
9447 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9448 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9449 case rvc_normal:
9451 /* Since the frexp function always expects base 2, and in
9452 GCC normalized significands are already in the range
9453 [0.5, 1.0), we have exactly what frexp wants. */
9454 REAL_VALUE_TYPE frac_rvt = *value;
9455 SET_REAL_EXP (&frac_rvt, 0);
9456 frac = build_real (rettype, frac_rvt);
9457 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9459 break;
9460 default:
9461 gcc_unreachable ();
9464 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9465 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9466 TREE_SIDE_EFFECTS (arg1) = 1;
9467 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9468 suppress_warning (res, OPT_Wunused_value);
9469 return res;
9472 return NULL_TREE;
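/* For example, frexp (6.0, &e) folds to roughly (*e = 3, 0.75),
   since 6.0 == 0.75 * 2**3, while for +-Inf and NaN the value is
   returned unchanged and *e is left unspecified.  */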
9475 /* Fold a call to builtin modf. */
9477 static tree
9478 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9480 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9481 return NULL_TREE;
9483 STRIP_NOPS (arg0);
9485 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9486 return NULL_TREE;
9488 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9490 /* Proceed if a valid pointer type was passed in. */
9491 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9493 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9494 REAL_VALUE_TYPE trunc, frac;
9495 tree res;
9497 switch (value->cl)
9499 case rvc_nan:
9500 case rvc_zero:
9501 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9502 trunc = frac = *value;
9503 break;
9504 case rvc_inf:
9505 /* For +-Inf, return (*arg1 = arg0, +-0). */
9506 frac = dconst0;
9507 frac.sign = value->sign;
9508 trunc = *value;
9509 break;
9510 case rvc_normal:
9511 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9512 real_trunc (&trunc, VOIDmode, value);
9513 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9514 /* If the original number was negative and already
9515 integral, then the fractional part is -0.0. */
9516 if (value->sign && frac.cl == rvc_zero)
9517 frac.sign = value->sign;
9518 break;
9521 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9522 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9523 build_real (rettype, trunc));
9524 TREE_SIDE_EFFECTS (arg1) = 1;
9525 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9526 build_real (rettype, frac));
9527 suppress_warning (res, OPT_Wunused_value);
9528 return res;
9531 return NULL_TREE;
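/* For example, modf (-2.5, &ip) folds to roughly (*ip = -2.0, -0.5),
   and modf (-2.0, &ip) to (*ip = -2.0, -0.0): an already-integral
   negative argument yields a fractional part of -0.0.  */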
9534 /* Given a location LOC, an interclass builtin function decl FNDECL
9535 and its single argument ARG, return a folded expression computing
9536 the same, or NULL_TREE if we either couldn't or didn't want to fold
9537 (the latter happens if there's an RTL instruction available). */
9539 static tree
9540 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9542 machine_mode mode;
9544 if (!validate_arg (arg, REAL_TYPE))
9545 return NULL_TREE;
9547 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9548 return NULL_TREE;
9550 mode = TYPE_MODE (TREE_TYPE (arg));
9552 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9554 /* If there is no optab, try generic code. */
9555 switch (DECL_FUNCTION_CODE (fndecl))
9557 tree result;
9559 CASE_FLT_FN (BUILT_IN_ISINF):
9561 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9562 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9563 tree type = TREE_TYPE (arg);
9564 REAL_VALUE_TYPE r;
9565 char buf[128];
9567 if (is_ibm_extended)
9569 /* NaN and Inf are encoded in the high-order double value
9570 only. The low-order value is not significant. */
9571 type = double_type_node;
9572 mode = DFmode;
9573 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9575 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9576 real_from_string (&r, buf);
9577 result = build_call_expr (isgr_fn, 2,
9578 fold_build1_loc (loc, ABS_EXPR, type, arg),
9579 build_real (type, r));
9580 return result;
9582 CASE_FLT_FN (BUILT_IN_FINITE):
9583 case BUILT_IN_ISFINITE:
9585 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9586 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9587 tree type = TREE_TYPE (arg);
9588 REAL_VALUE_TYPE r;
9589 char buf[128];
9591 if (is_ibm_extended)
9593 /* NaN and Inf are encoded in the high-order double value
9594 only. The low-order value is not significant. */
9595 type = double_type_node;
9596 mode = DFmode;
9597 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9599 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9600 real_from_string (&r, buf);
9601 result = build_call_expr (isle_fn, 2,
9602 fold_build1_loc (loc, ABS_EXPR, type, arg),
9603 build_real (type, r));
9604 /*result = fold_build2_loc (loc, UNGT_EXPR,
9605 TREE_TYPE (TREE_TYPE (fndecl)),
9606 fold_build1_loc (loc, ABS_EXPR, type, arg),
9607 build_real (type, r));
9608 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9609 TREE_TYPE (TREE_TYPE (fndecl)),
9610 result);*/
9611 return result;
9613 case BUILT_IN_ISNORMAL:
9615 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9616 islessequal(fabs(x),DBL_MAX). */
9617 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9618 tree type = TREE_TYPE (arg);
9619 tree orig_arg, max_exp, min_exp;
9620 machine_mode orig_mode = mode;
9621 REAL_VALUE_TYPE rmax, rmin;
9622 char buf[128];
9624 orig_arg = arg = builtin_save_expr (arg);
9625 if (is_ibm_extended)
9627 /* Use double to test the normal range of IBM extended
9628 precision. Emin for IBM extended precision is
9629 different to emin for IEEE double, being 53 higher
9630 since the low double exponent is at least 53 lower
9631 than the high double exponent. */
9632 type = double_type_node;
9633 mode = DFmode;
9634 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9636 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9638 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9639 real_from_string (&rmax, buf);
9640 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9641 real_from_string (&rmin, buf);
9642 max_exp = build_real (type, rmax);
9643 min_exp = build_real (type, rmin);
9645 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9646 if (is_ibm_extended)
9648 /* Testing the high end of the range is done just using
9649 the high double, using the same test as isfinite().
9650 For the subnormal end of the range we first test the
9651 high double, then if its magnitude is equal to the
9652 limit of 0x1p-969, we test whether the low double is
9653 non-zero and opposite sign to the high double. */
9654 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9655 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9656 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9657 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9658 arg, min_exp);
9659 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9660 complex_double_type_node, orig_arg);
9661 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9662 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9663 tree zero = build_real (type, dconst0);
9664 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9665 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9666 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9667 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9668 fold_build3 (COND_EXPR,
9669 integer_type_node,
9670 hilt, logt, lolt));
9671 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9672 eq_min, ok_lo);
9673 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9674 gt_min, eq_min);
9676 else
9678 tree const isge_fn
9679 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9680 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9682 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9683 max_exp, min_exp);
9684 return result;
9686 default:
9687 break;
9690 return NULL_TREE;
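/* As a concrete instance of the expansions above, for IEEE double
   isnormal (x) becomes roughly
     isgreaterequal (fabs (x), 0x1p-1022) & islessequal (fabs (x), DBL_MAX)
   where 0x1p-1022 is the smallest normal value, produced from
   REAL_MODE_FORMAT (mode)->emin - 1.  */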
9693 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9694 ARG is the argument for the call. */
9696 static tree
9697 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9699 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9701 if (!validate_arg (arg, REAL_TYPE))
9702 return NULL_TREE;
9704 switch (builtin_index)
9706 case BUILT_IN_ISINF:
9707 if (tree_expr_infinite_p (arg))
9708 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9709 if (!tree_expr_maybe_infinite_p (arg))
9710 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9711 return NULL_TREE;
9713 case BUILT_IN_ISINF_SIGN:
9715 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9716 /* In a boolean context, GCC will fold the inner COND_EXPR to
9717 1. So e.g. "if (isinf_sign(x))" would be folded to just
9718 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9719 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9720 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9721 tree tmp = NULL_TREE;
9723 arg = builtin_save_expr (arg);
9725 if (signbit_fn && isinf_fn)
9727 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9728 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9730 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9731 signbit_call, integer_zero_node);
9732 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9733 isinf_call, integer_zero_node);
9735 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9736 integer_minus_one_node, integer_one_node);
9737 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9738 isinf_call, tmp,
9739 integer_zero_node);
9742 return tmp;
9745 case BUILT_IN_ISFINITE:
9746 if (tree_expr_finite_p (arg))
9747 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9748 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9749 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9750 return NULL_TREE;
9752 case BUILT_IN_ISNAN:
9753 if (tree_expr_nan_p (arg))
9754 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9755 if (!tree_expr_maybe_nan_p (arg))
9756 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9759 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9760 if (is_ibm_extended)
9762 /* NaN and Inf are encoded in the high-order double value
9763 only. The low-order value is not significant. */
9764 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9767 arg = builtin_save_expr (arg);
9768 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9770 case BUILT_IN_ISSIGNALING:
9771 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9772 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9773 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9774 here, so there is some possibility of __builtin_issignaling working
9775 without -fsignaling-nans. Especially when -fno-signaling-nans is
9776 the default. */
9777 if (!tree_expr_maybe_nan_p (arg))
9778 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9779 return NULL_TREE;
9781 default:
9782 gcc_unreachable ();
9786 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9787 This builtin will generate code to return the appropriate floating
9788 point classification depending on the value of the floating point
9789 number passed in. The possible return values must be supplied as
9790 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9791 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9792 one floating point argument which is "type generic". */
9794 static tree
9795 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9797 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9798 arg, type, res, tmp;
9799 machine_mode mode;
9800 REAL_VALUE_TYPE r;
9801 char buf[128];
9803 /* Verify the required arguments in the original call. */
9804 if (nargs != 6
9805 || !validate_arg (args[0], INTEGER_TYPE)
9806 || !validate_arg (args[1], INTEGER_TYPE)
9807 || !validate_arg (args[2], INTEGER_TYPE)
9808 || !validate_arg (args[3], INTEGER_TYPE)
9809 || !validate_arg (args[4], INTEGER_TYPE)
9810 || !validate_arg (args[5], REAL_TYPE))
9811 return NULL_TREE;
9813 fp_nan = args[0];
9814 fp_infinite = args[1];
9815 fp_normal = args[2];
9816 fp_subnormal = args[3];
9817 fp_zero = args[4];
9818 arg = args[5];
9819 type = TREE_TYPE (arg);
9820 mode = TYPE_MODE (type);
9821 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9823 /* fpclassify(x) ->
9824 isnan(x) ? FP_NAN :
9825 (fabs(x) == Inf ? FP_INFINITE :
9826 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9827 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9829 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9830 build_real (type, dconst0));
9831 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9832 tmp, fp_zero, fp_subnormal);
9834 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9835 real_from_string (&r, buf);
9836 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9837 arg, build_real (type, r));
9838 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9840 if (tree_expr_maybe_infinite_p (arg))
9842 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9843 build_real (type, dconstinf));
9844 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9845 fp_infinite, res);
9848 if (tree_expr_maybe_nan_p (arg))
9850 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9851 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9854 return res;
9857 /* Fold a call to an unordered comparison function such as
9858 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9859 being called and ARG0 and ARG1 are the arguments for the call.
9860 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9861 the opposite of the desired result. UNORDERED_CODE is used
9862 for modes that can hold NaNs and ORDERED_CODE is used for
9863 the rest. */
9865 static tree
9866 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9867 enum tree_code unordered_code,
9868 enum tree_code ordered_code)
9870 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9871 enum tree_code code;
9872 tree type0, type1;
9873 enum tree_code code0, code1;
9874 tree cmp_type = NULL_TREE;
9876 type0 = TREE_TYPE (arg0);
9877 type1 = TREE_TYPE (arg1);
9879 code0 = TREE_CODE (type0);
9880 code1 = TREE_CODE (type1);
9882 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9883 /* Choose the wider of two real types. */
9884 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9885 ? type0 : type1;
9886 else if (code0 == REAL_TYPE
9887 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
9888 cmp_type = type0;
9889 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
9890 && code1 == REAL_TYPE)
9891 cmp_type = type1;
9893 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9894 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9896 if (unordered_code == UNORDERED_EXPR)
9898 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9899 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9900 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9901 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9902 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9905 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9906 ? unordered_code : ordered_code;
9907 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9908 fold_build2_loc (loc, code, type, arg0, arg1));
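/* For example, isgreater (x, y) is folded to !(x UNLE y) when either
   operand may be a NaN, which unlike a plain x > y does not raise
   FE_INVALID on quiet NaNs; when neither operand can be a NaN the
   ordered LE_EXPR is used instead.  */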
9911 /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
9912 After choosing the wider floating-point type for the comparison,
9913 the code is folded to:
9914 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
9916 static tree
9917 fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
9919 tree type0, type1;
9920 enum tree_code code0, code1;
9921 tree cmp1, cmp2, cmp_type = NULL_TREE;
9923 type0 = TREE_TYPE (arg0);
9924 type1 = TREE_TYPE (arg1);
9926 code0 = TREE_CODE (type0);
9927 code1 = TREE_CODE (type1);
9929 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9930 /* Choose the wider of two real types. */
9931 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9932 ? type0 : type1;
9933 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9934 cmp_type = type0;
9935 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9936 cmp_type = type1;
9938 arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
9939 arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
9941 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
9942 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
9944 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
9947 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9948 arithmetic if it can never overflow, or into internal functions that
9949 return both the arithmetic result and an overflow flag in
9950 a complex integer result, or into some other check for overflow.
9951 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9952 checking part of that. */
9954 static tree
9955 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9956 tree arg0, tree arg1, tree arg2)
9958 enum internal_fn ifn = IFN_LAST;
9959 /* The code of the expression corresponding to the built-in. */
9960 enum tree_code opcode = ERROR_MARK;
9961 bool ovf_only = false;
9963 switch (fcode)
9965 case BUILT_IN_ADD_OVERFLOW_P:
9966 ovf_only = true;
9967 /* FALLTHRU */
9968 case BUILT_IN_ADD_OVERFLOW:
9969 case BUILT_IN_SADD_OVERFLOW:
9970 case BUILT_IN_SADDL_OVERFLOW:
9971 case BUILT_IN_SADDLL_OVERFLOW:
9972 case BUILT_IN_UADD_OVERFLOW:
9973 case BUILT_IN_UADDL_OVERFLOW:
9974 case BUILT_IN_UADDLL_OVERFLOW:
9975 opcode = PLUS_EXPR;
9976 ifn = IFN_ADD_OVERFLOW;
9977 break;
9978 case BUILT_IN_SUB_OVERFLOW_P:
9979 ovf_only = true;
9980 /* FALLTHRU */
9981 case BUILT_IN_SUB_OVERFLOW:
9982 case BUILT_IN_SSUB_OVERFLOW:
9983 case BUILT_IN_SSUBL_OVERFLOW:
9984 case BUILT_IN_SSUBLL_OVERFLOW:
9985 case BUILT_IN_USUB_OVERFLOW:
9986 case BUILT_IN_USUBL_OVERFLOW:
9987 case BUILT_IN_USUBLL_OVERFLOW:
9988 opcode = MINUS_EXPR;
9989 ifn = IFN_SUB_OVERFLOW;
9990 break;
9991 case BUILT_IN_MUL_OVERFLOW_P:
9992 ovf_only = true;
9993 /* FALLTHRU */
9994 case BUILT_IN_MUL_OVERFLOW:
9995 case BUILT_IN_SMUL_OVERFLOW:
9996 case BUILT_IN_SMULL_OVERFLOW:
9997 case BUILT_IN_SMULLL_OVERFLOW:
9998 case BUILT_IN_UMUL_OVERFLOW:
9999 case BUILT_IN_UMULL_OVERFLOW:
10000 case BUILT_IN_UMULLL_OVERFLOW:
10001 opcode = MULT_EXPR;
10002 ifn = IFN_MUL_OVERFLOW;
10003 break;
10004 default:
10005 gcc_unreachable ();
10008 /* For the "generic" overloads, the first two arguments can have different
10009 types and the last argument determines the target type to use to check
10010 for overflow. The arguments of the other overloads all have the same
10011 type. */
10012 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10014 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10015 arguments are constant, attempt to fold the built-in call into a constant
10016 expression indicating whether or not it detected an overflow. */
10017 if (ovf_only
10018 && TREE_CODE (arg0) == INTEGER_CST
10019 && TREE_CODE (arg1) == INTEGER_CST)
10020 /* Perform the computation in the target type and check for overflow. */
10021 return omit_one_operand_loc (loc, boolean_type_node,
10022 arith_overflowed_p (opcode, type, arg0, arg1)
10023 ? boolean_true_node : boolean_false_node,
10024 arg2);
10026 tree intres, ovfres;
10027 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10029 intres = fold_binary_loc (loc, opcode, type,
10030 fold_convert_loc (loc, type, arg0),
10031 fold_convert_loc (loc, type, arg1));
10032 if (TREE_OVERFLOW (intres))
10033 intres = drop_tree_overflow (intres);
10034 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10035 ? boolean_true_node : boolean_false_node);
10037 else
10039 tree ctype = build_complex_type (type);
10040 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10041 arg0, arg1);
10042 tree tgt = save_expr (call);
10043 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10044 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10045 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10048 if (ovf_only)
10049 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10051 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10052 tree store
10053 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10054 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
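/* A rough sketch of the non-constant case above (names illustrative):
     __builtin_add_overflow (a, b, &r)
   is folded to the equivalent of
     c = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>;
   i.e. the stored value is the wrapped result and the value of the
   whole expression is the overflow flag, while the _overflow_p
   variants keep only the flag.  */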
10057 /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
10058 internal function. */
10060 static tree
10061 fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10062 tree arg0, tree arg1)
10064 enum internal_fn ifn;
10065 enum built_in_function fcodei, fcodel, fcodell;
10066 tree arg0_type = TREE_TYPE (arg0);
10067 tree cast_type = NULL_TREE;
10068 int addend = 0;
10070 switch (fcode)
10072 case BUILT_IN_CLZG:
10073 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10074 return NULL_TREE;
10075 ifn = IFN_CLZ;
10076 fcodei = BUILT_IN_CLZ;
10077 fcodel = BUILT_IN_CLZL;
10078 fcodell = BUILT_IN_CLZLL;
10079 break;
10080 case BUILT_IN_CTZG:
10081 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10082 return NULL_TREE;
10083 ifn = IFN_CTZ;
10084 fcodei = BUILT_IN_CTZ;
10085 fcodel = BUILT_IN_CTZL;
10086 fcodell = BUILT_IN_CTZLL;
10087 break;
10088 case BUILT_IN_CLRSBG:
10089 ifn = IFN_CLRSB;
10090 fcodei = BUILT_IN_CLRSB;
10091 fcodel = BUILT_IN_CLRSBL;
10092 fcodell = BUILT_IN_CLRSBLL;
10093 break;
10094 case BUILT_IN_FFSG:
10095 ifn = IFN_FFS;
10096 fcodei = BUILT_IN_FFS;
10097 fcodel = BUILT_IN_FFSL;
10098 fcodell = BUILT_IN_FFSLL;
10099 break;
10100 case BUILT_IN_PARITYG:
10101 ifn = IFN_PARITY;
10102 fcodei = BUILT_IN_PARITY;
10103 fcodel = BUILT_IN_PARITYL;
10104 fcodell = BUILT_IN_PARITYLL;
10105 break;
10106 case BUILT_IN_POPCOUNTG:
10107 ifn = IFN_POPCOUNT;
10108 fcodei = BUILT_IN_POPCOUNT;
10109 fcodel = BUILT_IN_POPCOUNTL;
10110 fcodell = BUILT_IN_POPCOUNTLL;
10111 break;
10112 default:
10113 gcc_unreachable ();
10116 if (TYPE_PRECISION (arg0_type)
10117 <= TYPE_PRECISION (long_long_unsigned_type_node))
10119 if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10121 cast_type = (TYPE_UNSIGNED (arg0_type)
10122 ? unsigned_type_node : integer_type_node);
10123 else if (TYPE_PRECISION (arg0_type)
10124 <= TYPE_PRECISION (long_unsigned_type_node))
10126 cast_type = (TYPE_UNSIGNED (arg0_type)
10127 ? long_unsigned_type_node : long_integer_type_node);
10128 fcodei = fcodel;
10130 else
10132 cast_type = (TYPE_UNSIGNED (arg0_type)
10133 ? long_long_unsigned_type_node
10134 : long_long_integer_type_node);
10135 fcodei = fcodell;
10138 else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10140 cast_type
10141 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10142 TYPE_UNSIGNED (arg0_type));
10143 gcc_assert (TYPE_PRECISION (cast_type)
10144 == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10145 fcodei = END_BUILTINS;
10147 else
10148 fcodei = END_BUILTINS;
10149 if (cast_type)
10151 switch (fcode)
10153 case BUILT_IN_CLZG:
10154 case BUILT_IN_CLRSBG:
10155 addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10156 break;
10157 default:
10158 break;
10160 arg0 = fold_convert (cast_type, arg0);
10161 arg0_type = cast_type;
10164 if (arg1)
10165 arg1 = fold_convert (integer_type_node, arg1);
10167 tree arg2 = arg1;
10168 if (fcode == BUILT_IN_CLZG && addend)
10170 if (arg1)
10171 arg0 = save_expr (arg0);
10172 arg2 = NULL_TREE;
10174 tree call = NULL_TREE, tem;
10175 if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10176 && (TYPE_PRECISION (arg0_type)
10177 == 2 * TYPE_PRECISION (long_long_unsigned_type_node)))
10179 /* __int128 expansions using up to 2 long long builtins. */
10180 arg0 = save_expr (arg0);
10181 tree type = (TYPE_UNSIGNED (arg0_type)
10182 ? long_long_unsigned_type_node
10183 : long_long_integer_type_node);
10184 tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10185 build_int_cst (integer_type_node,
10186 MAX_FIXED_MODE_SIZE / 2));
10187 hi = fold_convert (type, hi);
10188 tree lo = fold_convert (type, arg0);
10189 switch (fcode)
10191 case BUILT_IN_CLZG:
10192 call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10193 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10194 build_int_cst (integer_type_node,
10195 MAX_FIXED_MODE_SIZE / 2));
10196 if (arg2)
10197 call = fold_build3 (COND_EXPR, integer_type_node,
10198 fold_build2 (NE_EXPR, boolean_type_node,
10199 lo, build_zero_cst (type)),
10200 call, arg2);
10201 call = fold_build3 (COND_EXPR, integer_type_node,
10202 fold_build2 (NE_EXPR, boolean_type_node,
10203 hi, build_zero_cst (type)),
10204 fold_builtin_bit_query (loc, fcode, hi,
10205 NULL_TREE),
10206 call);
10207 break;
10208 case BUILT_IN_CTZG:
10209 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10210 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10211 build_int_cst (integer_type_node,
10212 MAX_FIXED_MODE_SIZE / 2));
10213 if (arg2)
10214 call = fold_build3 (COND_EXPR, integer_type_node,
10215 fold_build2 (NE_EXPR, boolean_type_node,
10216 hi, build_zero_cst (type)),
10217 call, arg2);
10218 call = fold_build3 (COND_EXPR, integer_type_node,
10219 fold_build2 (NE_EXPR, boolean_type_node,
10220 lo, build_zero_cst (type)),
10221 fold_builtin_bit_query (loc, fcode, lo,
10222 NULL_TREE),
10223 call);
10224 break;
10225 case BUILT_IN_CLRSBG:
10226 tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10227 tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10228 build_int_cst (integer_type_node,
10229 MAX_FIXED_MODE_SIZE / 2));
10230 tem = fold_build3 (COND_EXPR, integer_type_node,
10231 fold_build2 (LT_EXPR, boolean_type_node,
10232 fold_build2 (BIT_XOR_EXPR, type,
10233 lo, hi),
10234 build_zero_cst (type)),
10235 build_int_cst (integer_type_node,
10236 MAX_FIXED_MODE_SIZE / 2 - 1),
10237 tem);
10238 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10239 call = save_expr (call);
10240 call = fold_build3 (COND_EXPR, integer_type_node,
10241 fold_build2 (NE_EXPR, boolean_type_node,
10242 call,
10243 build_int_cst (integer_type_node,
10244 MAX_FIXED_MODE_SIZE
10245 / 2 - 1)),
10246 call, tem);
10247 break;
10248 case BUILT_IN_FFSG:
10249 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10250 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10251 build_int_cst (integer_type_node,
10252 MAX_FIXED_MODE_SIZE / 2));
10253 call = fold_build3 (COND_EXPR, integer_type_node,
10254 fold_build2 (NE_EXPR, boolean_type_node,
10255 hi, build_zero_cst (type)),
10256 call, integer_zero_node);
10257 call = fold_build3 (COND_EXPR, integer_type_node,
10258 fold_build2 (NE_EXPR, boolean_type_node,
10259 lo, build_zero_cst (type)),
10260 fold_builtin_bit_query (loc, fcode, lo,
10261 NULL_TREE),
10262 call);
10263 break;
10264 case BUILT_IN_PARITYG:
10265 call = fold_builtin_bit_query (loc, fcode,
10266 fold_build2 (BIT_XOR_EXPR, type,
10267 lo, hi), NULL_TREE);
10268 break;
10269 case BUILT_IN_POPCOUNTG:
10270 call = fold_build2 (PLUS_EXPR, integer_type_node,
10271 fold_builtin_bit_query (loc, fcode, hi,
10272 NULL_TREE),
10273 fold_builtin_bit_query (loc, fcode, lo,
10274 NULL_TREE));
10275 break;
10276 default:
10277 gcc_unreachable ();
10280 else
10282 /* Only keep second argument to IFN_CLZ/IFN_CTZ if it is the
10283 value defined at zero during GIMPLE, or for large/huge _BitInt
10284 (which are then lowered during bitint lowering). */
10285 if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10287 int val;
10288 if (fcode == BUILT_IN_CLZG)
10290 if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10291 val) != 2
10292 || wi::to_widest (arg2) != val)
10293 arg2 = NULL_TREE;
10295 else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10296 val) != 2
10297 || wi::to_widest (arg2) != val)
10298 arg2 = NULL_TREE;
10299 if (!direct_internal_fn_supported_p (ifn, arg0_type,
10300 OPTIMIZE_FOR_BOTH))
10301 arg2 = NULL_TREE;
10302 if (arg2 == NULL_TREE)
10303 arg0 = save_expr (arg0);
10305 if (fcodei == END_BUILTINS || arg2)
10306 call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10307 arg2 ? 2 : 1, arg0, arg2);
10308 else
10309 call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
10310 arg0);
10312 if (addend)
10313 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10314 build_int_cst (integer_type_node, addend));
10315 if (arg1 && arg2 == NULL_TREE)
10316 call = fold_build3 (COND_EXPR, integer_type_node,
10317 fold_build2 (NE_EXPR, boolean_type_node,
10318 arg0, build_zero_cst (arg0_type)),
10319 call, arg1);
10321 return call;
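/* Sketch of the double-word split above, assuming 64-bit long long:
     __builtin_clzg ((unsigned __int128) x)
   becomes roughly
     hi != 0 ? clz (hi) : clz (lo) + 64
   with HI and LO the upper and lower halves; when a second
   (value-at-zero) argument is given, it replaces the inner expression
   for LO == 0.  */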
10324 /* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
10325 that return both the arithmetic result and an overflow
10326 flag in a complex integer result. */
10328 static tree
10329 fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10330 tree *args)
10332 enum internal_fn ifn;
10334 switch (fcode)
10336 case BUILT_IN_ADDC:
10337 case BUILT_IN_ADDCL:
10338 case BUILT_IN_ADDCLL:
10339 ifn = IFN_ADD_OVERFLOW;
10340 break;
10341 case BUILT_IN_SUBC:
10342 case BUILT_IN_SUBCL:
10343 case BUILT_IN_SUBCLL:
10344 ifn = IFN_SUB_OVERFLOW;
10345 break;
10346 default:
10347 gcc_unreachable ();
10350 tree type = TREE_TYPE (args[0]);
10351 tree ctype = build_complex_type (type);
10352 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10353 args[0], args[1]);
10354 tree tgt = save_expr (call);
10355 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10356 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10357 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10358 intres, args[2]);
10359 tgt = save_expr (call);
10360 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10361 tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10362 ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10363 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10364 tree store
10365 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10366 return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
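/* For example (names illustrative), __builtin_addc (a, b, cin, &cout)
   is folded to roughly
     c1 = .ADD_OVERFLOW (a, b);
     c2 = .ADD_OVERFLOW (REALPART_EXPR <c1>, cin);
     cout = IMAGPART_EXPR <c1> | IMAGPART_EXPR <c2>;
   with the value of the whole expression being REALPART_EXPR <c2>.  */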
10369 /* Fold a call to __builtin_FILE to a constant string. */
10371 static inline tree
10372 fold_builtin_FILE (location_t loc)
10374 if (const char *fname = LOCATION_FILE (loc))
10376 /* The documentation says this builtin is equivalent to the preprocessor
10377 __FILE__ macro so it appears appropriate to use the same file prefix
10378 mappings. */
10379 fname = remap_macro_filename (fname);
10380 return build_string_literal (fname);
10383 return build_string_literal ("");
10386 /* Fold a call to __builtin_FUNCTION to a constant string. */
10388 static inline tree
10389 fold_builtin_FUNCTION ()
10391 const char *name = "";
10393 if (current_function_decl)
10394 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10396 return build_string_literal (name);
10399 /* Fold a call to __builtin_LINE to an integer constant. */
10401 static inline tree
10402 fold_builtin_LINE (location_t loc, tree type)
10404 return build_int_cst (type, LOCATION_LINE (loc));
10407 /* Fold a call to built-in function FNDECL with 0 arguments.
10408 This function returns NULL_TREE if no simplification was possible. */
10410 static tree
10411 fold_builtin_0 (location_t loc, tree fndecl)
10413 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10414 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10415 switch (fcode)
10417 case BUILT_IN_FILE:
10418 return fold_builtin_FILE (loc);
10420 case BUILT_IN_FUNCTION:
10421 return fold_builtin_FUNCTION ();
10423 case BUILT_IN_LINE:
10424 return fold_builtin_LINE (loc, type);
10426 CASE_FLT_FN (BUILT_IN_INF):
10427 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10428 case BUILT_IN_INFD32:
10429 case BUILT_IN_INFD64:
10430 case BUILT_IN_INFD128:
10431 return fold_builtin_inf (loc, type, true);
10433 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10434 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10435 return fold_builtin_inf (loc, type, false);
10437 case BUILT_IN_CLASSIFY_TYPE:
10438 return fold_builtin_classify_type (NULL_TREE);
10440 case BUILT_IN_UNREACHABLE:
10441 /* Rewrite any explicit calls to __builtin_unreachable. */
10442 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10443 return build_builtin_unreachable (loc);
10444 break;
10446 default:
10447 break;
10449 return NULL_TREE;
10452 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10453 This function returns NULL_TREE if no simplification was possible. */
10455 static tree
10456 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10458 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10459 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10461 if (TREE_CODE (arg0) == ERROR_MARK)
10462 return NULL_TREE;
10464 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10465 return ret;
10467 switch (fcode)
10469 case BUILT_IN_CONSTANT_P:
10471 tree val = fold_builtin_constant_p (arg0);
10473 /* Gimplification will pull the CALL_EXPR for the builtin out of
10474 an if condition. When not optimizing, we'll not CSE it back.
10475 To avoid link-error regressions, return false now.
10476 if (!val && !optimize)
10477 val = integer_zero_node;
10479 return val;
10482 case BUILT_IN_CLASSIFY_TYPE:
10483 return fold_builtin_classify_type (arg0);
10485 case BUILT_IN_STRLEN:
10486 return fold_builtin_strlen (loc, expr, type, arg0);
10488 CASE_FLT_FN (BUILT_IN_FABS):
10489 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10490 case BUILT_IN_FABSD32:
10491 case BUILT_IN_FABSD64:
10492 case BUILT_IN_FABSD128:
10493 return fold_builtin_fabs (loc, arg0, type);
10495 case BUILT_IN_ABS:
10496 case BUILT_IN_LABS:
10497 case BUILT_IN_LLABS:
10498 case BUILT_IN_IMAXABS:
10499 return fold_builtin_abs (loc, arg0, type);
10501 CASE_FLT_FN (BUILT_IN_CONJ):
10502 if (validate_arg (arg0, COMPLEX_TYPE)
10503 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10504 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10505 break;
10507 CASE_FLT_FN (BUILT_IN_CREAL):
10508 if (validate_arg (arg0, COMPLEX_TYPE)
10509 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10510 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10511 break;
10513 CASE_FLT_FN (BUILT_IN_CIMAG):
10514 if (validate_arg (arg0, COMPLEX_TYPE)
10515 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10516 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10517 break;
10519 CASE_FLT_FN (BUILT_IN_CARG):
10520 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10521 return fold_builtin_carg (loc, arg0, type);
10523 case BUILT_IN_ISASCII:
10524 return fold_builtin_isascii (loc, arg0);
10526 case BUILT_IN_TOASCII:
10527 return fold_builtin_toascii (loc, arg0);
10529 case BUILT_IN_ISDIGIT:
10530 return fold_builtin_isdigit (loc, arg0);
10532 CASE_FLT_FN (BUILT_IN_FINITE):
10533 case BUILT_IN_FINITED32:
10534 case BUILT_IN_FINITED64:
10535 case BUILT_IN_FINITED128:
10536 case BUILT_IN_ISFINITE:
10538 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10539 if (ret)
10540 return ret;
10541 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10544 CASE_FLT_FN (BUILT_IN_ISINF):
10545 case BUILT_IN_ISINFD32:
10546 case BUILT_IN_ISINFD64:
10547 case BUILT_IN_ISINFD128:
10549 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10550 if (ret)
10551 return ret;
10552 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10555 case BUILT_IN_ISNORMAL:
10556 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10558 case BUILT_IN_ISINF_SIGN:
10559 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10561 CASE_FLT_FN (BUILT_IN_ISNAN):
10562 case BUILT_IN_ISNAND32:
10563 case BUILT_IN_ISNAND64:
10564 case BUILT_IN_ISNAND128:
10565 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10567 case BUILT_IN_ISSIGNALING:
10568 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10570 case BUILT_IN_FREE:
10571 if (integer_zerop (arg0))
10572 return build_empty_stmt (loc);
10573 break;
10575 case BUILT_IN_CLZG:
10576 case BUILT_IN_CTZG:
10577 case BUILT_IN_CLRSBG:
10578 case BUILT_IN_FFSG:
10579 case BUILT_IN_PARITYG:
10580 case BUILT_IN_POPCOUNTG:
10581 return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10583 default:
10584 break;
10587 return NULL_TREE;
10591 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10592 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10593 if no simplification was possible. */
10595 static tree
10596 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10598 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10599 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10601 if (TREE_CODE (arg0) == ERROR_MARK
10602 || TREE_CODE (arg1) == ERROR_MARK)
10603 return NULL_TREE;
10605 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10606 return ret;
10608 switch (fcode)
10610 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10611 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10612 if (validate_arg (arg0, REAL_TYPE)
10613 && validate_arg (arg1, POINTER_TYPE))
10614 return do_mpfr_lgamma_r (arg0, arg1, type);
10615 break;
10617 CASE_FLT_FN (BUILT_IN_FREXP):
10618 return fold_builtin_frexp (loc, arg0, arg1, type);
10620 CASE_FLT_FN (BUILT_IN_MODF):
10621 return fold_builtin_modf (loc, arg0, arg1, type);
10623 case BUILT_IN_STRSPN:
10624 return fold_builtin_strspn (loc, expr, arg0, arg1);
10626 case BUILT_IN_STRCSPN:
10627 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10629 case BUILT_IN_STRPBRK:
10630 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10632 case BUILT_IN_EXPECT:
10633 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10635 case BUILT_IN_ISGREATER:
10636 return fold_builtin_unordered_cmp (loc, fndecl,
10637 arg0, arg1, UNLE_EXPR, LE_EXPR);
10638 case BUILT_IN_ISGREATEREQUAL:
10639 return fold_builtin_unordered_cmp (loc, fndecl,
10640 arg0, arg1, UNLT_EXPR, LT_EXPR);
10641 case BUILT_IN_ISLESS:
10642 return fold_builtin_unordered_cmp (loc, fndecl,
10643 arg0, arg1, UNGE_EXPR, GE_EXPR);
10644 case BUILT_IN_ISLESSEQUAL:
10645 return fold_builtin_unordered_cmp (loc, fndecl,
10646 arg0, arg1, UNGT_EXPR, GT_EXPR);
10647 case BUILT_IN_ISLESSGREATER:
10648 return fold_builtin_unordered_cmp (loc, fndecl,
10649 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10650 case BUILT_IN_ISUNORDERED:
10651 return fold_builtin_unordered_cmp (loc, fndecl,
10652 arg0, arg1, UNORDERED_EXPR,
10653 NOP_EXPR);
10655 case BUILT_IN_ISEQSIG:
10656 return fold_builtin_iseqsig (loc, arg0, arg1);
10658 /* We do the folding for va_start in the expander. */
10659 case BUILT_IN_VA_START:
10660 break;
10662 case BUILT_IN_OBJECT_SIZE:
10663 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10664 return fold_builtin_object_size (arg0, arg1, fcode);
10666 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10667 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10669 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10670 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10672 case BUILT_IN_CLZG:
10673 case BUILT_IN_CTZG:
10674 return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10676 default:
10677 break;
10679 return NULL_TREE;
10682 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10683 and ARG2.
10684 This function returns NULL_TREE if no simplification was possible. */
10686 static tree
10687 fold_builtin_3 (location_t loc, tree fndecl,
10688 tree arg0, tree arg1, tree arg2)
10690 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10691 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10693 if (TREE_CODE (arg0) == ERROR_MARK
10694 || TREE_CODE (arg1) == ERROR_MARK
10695 || TREE_CODE (arg2) == ERROR_MARK)
10696 return NULL_TREE;
10698 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10699 arg0, arg1, arg2))
10700 return ret;
10702 switch (fcode)
10705 CASE_FLT_FN (BUILT_IN_SINCOS):
10706 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10708 CASE_FLT_FN (BUILT_IN_REMQUO):
10709 if (validate_arg (arg0, REAL_TYPE)
10710 && validate_arg (arg1, REAL_TYPE)
10711 && validate_arg (arg2, POINTER_TYPE))
10712 return do_mpfr_remquo (arg0, arg1, arg2);
10713 break;
10715 case BUILT_IN_MEMCMP:
10716 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10718 case BUILT_IN_EXPECT:
10719 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10721 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10722 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10724 case BUILT_IN_ADD_OVERFLOW:
10725 case BUILT_IN_SUB_OVERFLOW:
10726 case BUILT_IN_MUL_OVERFLOW:
10727 case BUILT_IN_ADD_OVERFLOW_P:
10728 case BUILT_IN_SUB_OVERFLOW_P:
10729 case BUILT_IN_MUL_OVERFLOW_P:
10730 case BUILT_IN_SADD_OVERFLOW:
10731 case BUILT_IN_SADDL_OVERFLOW:
10732 case BUILT_IN_SADDLL_OVERFLOW:
10733 case BUILT_IN_SSUB_OVERFLOW:
10734 case BUILT_IN_SSUBL_OVERFLOW:
10735 case BUILT_IN_SSUBLL_OVERFLOW:
10736 case BUILT_IN_SMUL_OVERFLOW:
10737 case BUILT_IN_SMULL_OVERFLOW:
10738 case BUILT_IN_SMULLL_OVERFLOW:
10739 case BUILT_IN_UADD_OVERFLOW:
10740 case BUILT_IN_UADDL_OVERFLOW:
10741 case BUILT_IN_UADDLL_OVERFLOW:
10742 case BUILT_IN_USUB_OVERFLOW:
10743 case BUILT_IN_USUBL_OVERFLOW:
10744 case BUILT_IN_USUBLL_OVERFLOW:
10745 case BUILT_IN_UMUL_OVERFLOW:
10746 case BUILT_IN_UMULL_OVERFLOW:
10747 case BUILT_IN_UMULLL_OVERFLOW:
10748 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10750 default:
10751 break;
10753 return NULL_TREE;
10756 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10757 ARGS is an array of NARGS arguments. IGNORE is true if the result
10758 of the function call is ignored. This function returns NULL_TREE
10759 if no simplification was possible. */
10761 static tree
10762 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10763 int nargs, bool)
10765 tree ret = NULL_TREE;
10767 switch (nargs)
10769 case 0:
10770 ret = fold_builtin_0 (loc, fndecl);
10771 break;
10772 case 1:
10773 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10774 break;
10775 case 2:
10776 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10777 break;
10778 case 3:
10779 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10780 break;
10781 default:
10782 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10783 break;
10785 if (ret)
10787 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10788 SET_EXPR_LOCATION (ret, loc);
10789 return ret;
10791 return NULL_TREE;
10794 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10795 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10796 of arguments in ARGS to be omitted. OLDNARGS is the number of
10797 elements in ARGS. */
10799 static tree
10800 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10801 int skip, tree fndecl, int n, va_list newargs)
10803 int nargs = oldnargs - skip + n;
10804 tree *buffer;
10806 if (n > 0)
10808 int i, j;
10810 buffer = XALLOCAVEC (tree, nargs);
10811 for (i = 0; i < n; i++)
10812 buffer[i] = va_arg (newargs, tree);
10813 for (j = skip; j < oldnargs; j++, i++)
10814 buffer[i] = args[j];
10816 else
10817 buffer = args + skip;
10819 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10822 /* Return true if FNDECL shouldn't be folded right now.
10823 If a built-in function has an inline attribute always_inline
10824 wrapper, defer folding it after always_inline functions have
10825 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10826 might not be performed. */
10828 bool
10829 avoid_folding_inline_builtin (tree fndecl)
10831 return (DECL_DECLARED_INLINE_P (fndecl)
10832 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10833 && cfun
10834 && !cfun->always_inline_functions_inlined
10835 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
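/* For illustration only, not part of GCC itself: a glibc-style _FORTIFY_SOURCE
   wrapper is the kind of always_inline builtin wrapper this check defers for.
   Folding the strcpy call before always_inline inlining would bypass the
   object-size check done in the wrapper body:

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				       __builtin_object_size (__dest, 1));
     }  */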
10838 /* A wrapper function for builtin folding that prevents warnings for
10839 "statement without effect" and the like, caused by removing the
10840 call node earlier than the warning is generated. */
10842 tree
10843 fold_call_expr (location_t loc, tree exp, bool ignore)
10845 tree ret = NULL_TREE;
10846 tree fndecl = get_callee_fndecl (exp);
10847 if (fndecl && fndecl_built_in_p (fndecl)
10848 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10849 yet. Defer folding until we see all the arguments
10850 (after inlining). */
10851 && !CALL_EXPR_VA_ARG_PACK (exp))
10853 int nargs = call_expr_nargs (exp);
10855 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10856 instead last argument is __builtin_va_arg_pack (). Defer folding
10857 even in that case, until arguments are finalized. */
10858 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10860 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10861 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10862 return NULL_TREE;
10865 if (avoid_folding_inline_builtin (fndecl))
10866 return NULL_TREE;
10868 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10869 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10870 CALL_EXPR_ARGP (exp), ignore);
10871 else
10873 tree *args = CALL_EXPR_ARGP (exp);
10874 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10875 if (ret)
10876 return ret;
10879 return NULL_TREE;
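/* For illustration only, not part of GCC itself: a __builtin_va_arg_pack ()
   call forces the deferral above, since the argument list is only known after
   inlining.  A typical user-level wrapper (my_printf is a hypothetical name)
   looks like:

     extern __inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf (const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }  */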
10882 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10883 N arguments are passed in the array ARGARRAY. Return a folded
10884 expression or NULL_TREE if no simplification was possible. */
10886 tree
10887 fold_builtin_call_array (location_t loc, tree,
10888 tree fn,
10889 int n,
10890 tree *argarray)
10892 if (TREE_CODE (fn) != ADDR_EXPR)
10893 return NULL_TREE;
10895 tree fndecl = TREE_OPERAND (fn, 0);
10896 if (TREE_CODE (fndecl) == FUNCTION_DECL
10897 && fndecl_built_in_p (fndecl))
10899 /* If last argument is __builtin_va_arg_pack (), arguments to this
10900 function are not finalized yet. Defer folding until they are. */
10901 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10903 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10904 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10905 return NULL_TREE;
10907 if (avoid_folding_inline_builtin (fndecl))
10908 return NULL_TREE;
10909 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10910 return targetm.fold_builtin (fndecl, n, argarray, false);
10911 else
10912 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10915 return NULL_TREE;
10918 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10919 along with N new arguments specified as the "..." parameters. SKIP
10920 is the number of arguments in EXP to be omitted. This function is used
10921 to do varargs-to-varargs transformations. */
10923 static tree
10924 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10926 va_list ap;
10927 tree t;
10929 va_start (ap, n);
10930 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10931 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10932 va_end (ap);
10934 return t;
10937 /* Validate a single argument ARG against a tree code CODE representing
10938 a type. Return true when argument is valid. */
10940 static bool
10941 validate_arg (const_tree arg, enum tree_code code)
10943 if (!arg)
10944 return false;
10945 else if (code == POINTER_TYPE)
10946 return POINTER_TYPE_P (TREE_TYPE (arg));
10947 else if (code == INTEGER_TYPE)
10948 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10949 return code == TREE_CODE (TREE_TYPE (arg));
10952 /* This function validates the types of a function call argument list
10953 against a specified list of tree_codes. If the last specifier is a 0,
10954 that represents an ellipsis, otherwise the last specifier must be a
10955 VOID_TYPE.
10957 This is the GIMPLE version of validate_arglist. Eventually we want to
10958 completely convert builtins.cc to work from GIMPLEs and the tree based
10959 validate_arglist will then be removed. */
10961 bool
10962 validate_gimple_arglist (const gcall *call, ...)
10964 enum tree_code code;
10965 bool res = 0;
10966 va_list ap;
10967 const_tree arg;
10968 size_t i;
10970 va_start (ap, call);
10971 i = 0;
10975 code = (enum tree_code) va_arg (ap, int);
10976 switch (code)
10978 case 0:
10979 /* This signifies an ellipsis; any further arguments are all ok. */
10980 res = true;
10981 goto end;
10982 case VOID_TYPE:
10983 /* This signifies an endlink: if no arguments remain, return
10984 true, otherwise return false. */
10985 res = (i == gimple_call_num_args (call));
10986 goto end;
10987 default:
10988 /* If no parameters remain or the parameter's code does not
10989 match the specified code, return false. Otherwise continue
10990 checking any remaining arguments. */
10991 arg = gimple_call_arg (call, i++);
10992 if (!validate_arg (arg, code))
10993 goto end;
10994 break;
10997 while (1);
10999 /* We need gotos here since we can only have one VA_CLOSE in a
11000 function. */
11001 end: ;
11002 va_end (ap);
11004 return res;
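/* For illustration only, not part of GCC itself: a caller validating a call
   to sincos (x, &s, &c) would typically write

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, POINTER_TYPE,
			      VOID_TYPE)

   while a checker for a varargs-style call could end the list with 0 instead
   of VOID_TYPE so that any further arguments are accepted.  */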
11007 /* Default target-specific builtin expander that does nothing. */
11010 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11011 rtx target ATTRIBUTE_UNUSED,
11012 rtx subtarget ATTRIBUTE_UNUSED,
11013 machine_mode mode ATTRIBUTE_UNUSED,
11014 int ignore ATTRIBUTE_UNUSED)
11016 return NULL_RTX;
11019 /* Returns true if EXP represents data that would potentially reside
11020 in a readonly section. */
11022 bool
11023 readonly_data_expr (tree exp)
11025 STRIP_NOPS (exp);
11027 if (TREE_CODE (exp) != ADDR_EXPR)
11028 return false;
11030 exp = get_base_address (TREE_OPERAND (exp, 0));
11031 if (!exp)
11032 return false;
11034 /* Make sure we call decl_readonly_section only for trees it
11035 can handle (since it returns true for everything it doesn't
11036 understand). */
11037 if (TREE_CODE (exp) == STRING_CST
11038 || TREE_CODE (exp) == CONSTRUCTOR
11039 || (VAR_P (exp) && TREE_STATIC (exp)))
11040 return decl_readonly_section (exp, 0);
11041 else
11042 return false;
11045 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11046 to the call, and TYPE is its return type.
11048 Return NULL_TREE if no simplification was possible, otherwise return the
11049 simplified form of the call as a tree.
11051 The simplified form may be a constant or other expression which
11052 computes the same value, but in a more efficient manner (including
11053 calls to other builtin functions).
11055 The call may contain arguments which need to be evaluated, but
11056 which are not useful to determine the result of the call. In
11057 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11058 COMPOUND_EXPR will be an argument which must be evaluated.
11059 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11060 COMPOUND_EXPR in the chain will contain the tree for the simplified
11061 form of the builtin function call. */
11063 static tree
11064 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11066 if (!validate_arg (s1, POINTER_TYPE)
11067 || !validate_arg (s2, POINTER_TYPE))
11068 return NULL_TREE;
11070 tree fn;
11071 const char *p1, *p2;
11073 p2 = c_getstr (s2);
11074 if (p2 == NULL)
11075 return NULL_TREE;
11077 p1 = c_getstr (s1);
11078 if (p1 != NULL)
11080 const char *r = strpbrk (p1, p2);
11081 tree tem;
11083 if (r == NULL)
11084 return build_int_cst (TREE_TYPE (s1), 0);
11086 /* Return an offset into the constant string argument. */
11087 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11088 return fold_convert_loc (loc, type, tem);
11091 if (p2[0] == '\0')
11092 /* strpbrk(x, "") == NULL.
11093 Evaluate and ignore s1 in case it had side-effects. */
11094 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11096 if (p2[1] != '\0')
11097 return NULL_TREE; /* Really call strpbrk. */
11099 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11100 if (!fn)
11101 return NULL_TREE;
11103 /* New argument list transforming strpbrk(s1, s2) to
11104 strchr(s1, s2[0]). */
11105 return build_call_expr_loc (loc, fn, 2, s1,
11106 build_int_cst (integer_type_node, p2[0]));
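/* For illustration only, not part of GCC itself: assuming S is a char *, the
   transformations above mean roughly

     strpbrk (s, "")     ->  a null pointer (S still evaluated for effects)
     strpbrk (s, "/")    ->  strchr (s, '/')
     strpbrk ("ab", "b") ->  a pointer one byte past the start of "ab"

   with the strchr form only used when an implicit strchr decl exists.  */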
11109 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11110 to the call.
11112 Return NULL_TREE if no simplification was possible, otherwise return the
11113 simplified form of the call as a tree.
11115 The simplified form may be a constant or other expression which
11116 computes the same value, but in a more efficient manner (including
11117 calls to other builtin functions).
11119 The call may contain arguments which need to be evaluated, but
11120 which are not useful to determine the result of the call. In
11121 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11122 COMPOUND_EXPR will be an argument which must be evaluated.
11123 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11124 COMPOUND_EXPR in the chain will contain the tree for the simplified
11125 form of the builtin function call. */
11127 static tree
11128 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
11130 if (!validate_arg (s1, POINTER_TYPE)
11131 || !validate_arg (s2, POINTER_TYPE))
11132 return NULL_TREE;
11134 if (!check_nul_terminated_array (expr, s1)
11135 || !check_nul_terminated_array (expr, s2))
11136 return NULL_TREE;
11138 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11140 /* If either argument is "", return NULL_TREE. */
11141 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11142 /* Evaluate and ignore both arguments in case either one has
11143 side-effects. */
11144 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11145 s1, s2);
11146 return NULL_TREE;
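/* For illustration only, not part of GCC itself: with either argument known
   to be the empty string, e.g.

     size_t n = strspn (s, "");

   the call folds to (size_t) 0 while still evaluating both arguments in case
   they have side effects.  */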
11149 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11150 to the call.
11152 Return NULL_TREE if no simplification was possible, otherwise return the
11153 simplified form of the call as a tree.
11155 The simplified form may be a constant or other expression which
11156 computes the same value, but in a more efficient manner (including
11157 calls to other builtin functions).
11159 The call may contain arguments which need to be evaluated, but
11160 which are not useful to determine the result of the call. In
11161 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11162 COMPOUND_EXPR will be an argument which must be evaluated.
11163 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11164 COMPOUND_EXPR in the chain will contain the tree for the simplified
11165 form of the builtin function call. */
11167 static tree
11168 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
11170 if (!validate_arg (s1, POINTER_TYPE)
11171 || !validate_arg (s2, POINTER_TYPE))
11172 return NULL_TREE;
11174 if (!check_nul_terminated_array (expr, s1)
11175 || !check_nul_terminated_array (expr, s2))
11176 return NULL_TREE;
11178 /* If the first argument is "", return NULL_TREE. */
11179 const char *p1 = c_getstr (s1);
11180 if (p1 && *p1 == '\0')
11182 /* Evaluate and ignore argument s2 in case it has
11183 side-effects. */
11184 return omit_one_operand_loc (loc, size_type_node,
11185 size_zero_node, s2);
11188 /* If the second argument is "", return __builtin_strlen(s1). */
11189 const char *p2 = c_getstr (s2);
11190 if (p2 && *p2 == '\0')
11192 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11194 /* If the replacement _DECL isn't initialized, don't do the
11195 transformation. */
11196 if (!fn)
11197 return NULL_TREE;
11199 return build_call_expr_loc (loc, fn, 1, s1);
11201 return NULL_TREE;
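/* For illustration only, not part of GCC itself: the two foldings above mean
   roughly

     strcspn ("", s2)  ->  (size_t) 0       (S2 still evaluated)
     strcspn (s1, "")  ->  strlen (s1)

   with the strlen form only used when the implicit strlen decl exists.  */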
11204 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11205 produced, false otherwise. This is done so that we don't output the error
11206 or warning twice or three times. */
11208 bool
11209 fold_builtin_next_arg (tree exp, bool va_start_p)
11211 tree fntype = TREE_TYPE (current_function_decl);
11212 int nargs = call_expr_nargs (exp);
11213 tree arg;
11214 /* There is a good chance the current input_location points inside the
11215 definition of the va_start macro (perhaps on the token for the
11216 builtin) in a system header, so warnings will not be emitted.
11217 Use the location in real source code. */
11218 location_t current_location =
11219 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11220 NULL);
11222 if (!stdarg_p (fntype))
11224 error ("%<va_start%> used in function with fixed arguments");
11225 return true;
11228 if (va_start_p)
11230 if (va_start_p && (nargs != 2))
11232 error ("wrong number of arguments to function %<va_start%>");
11233 return true;
11235 arg = CALL_EXPR_ARG (exp, 1);
11237 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11238 when we checked the arguments and if needed issued a warning. */
11239 else
11241 if (nargs == 0)
11243 /* Evidently an out of date version of <stdarg.h>; can't validate
11244 va_start's second argument, but can still work as intended. */
11245 warning_at (current_location,
11246 OPT_Wvarargs,
11247 "%<__builtin_next_arg%> called without an argument");
11248 return true;
11250 else if (nargs > 1)
11252 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11253 return true;
11255 arg = CALL_EXPR_ARG (exp, 0);
11258 if (TREE_CODE (arg) == SSA_NAME
11259 && SSA_NAME_VAR (arg))
11260 arg = SSA_NAME_VAR (arg);
11262 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11263 or __builtin_next_arg (0) the first time we see it, after checking
11264 the arguments and if needed issuing a warning. */
11265 if (!integer_zerop (arg))
11267 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11269 /* Strip off all nops for the sake of the comparison. This
11270 is not quite the same as STRIP_NOPS. It does more.
11271 We must also strip off INDIRECT_EXPR for C++ reference
11272 parameters. */
11273 while (CONVERT_EXPR_P (arg)
11274 || INDIRECT_REF_P (arg))
11275 arg = TREE_OPERAND (arg, 0);
11276 if (arg != last_parm)
11278 /* FIXME: Sometimes with the tree optimizers we can get something
11279 other than the last argument even though the user used the last
11280 argument. We just warn and set the arg to be the last
11281 argument so that we will get wrong-code because of
11282 it. */
11283 warning_at (current_location,
11284 OPT_Wvarargs,
11285 "second parameter of %<va_start%> not last named argument");
11288 /* Undefined by C99 7.15.1.4p4 (va_start):
11289 "If the parameter parmN is declared with the register storage
11290 class, with a function or array type, or with a type that is
11291 not compatible with the type that results after application of
11292 the default argument promotions, the behavior is undefined."
11294 else if (DECL_REGISTER (arg))
11296 warning_at (current_location,
11297 OPT_Wvarargs,
11298 "undefined behavior when second parameter of "
11299 "%<va_start%> is declared with %<register%> storage");
11302 /* We want to verify the second parameter just once before the tree
11303 optimizers are run and then avoid keeping it in the tree,
11304 as otherwise we could warn even for correct code like:
11305 void foo (int i, ...)
11306 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11307 if (va_start_p)
11308 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11309 else
11310 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11312 return false;
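/* For illustration only, not part of GCC itself: the -Wvarargs warning above
   triggers for code such as (assuming <stdarg.h> is included)

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warned: A is not the last named parameter, B is
       va_end (ap);
     }  */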
11316 /* Expand a call EXP to __builtin_object_size. */
11318 static rtx
11319 expand_builtin_object_size (tree exp)
11321 tree ost;
11322 int object_size_type;
11323 tree fndecl = get_callee_fndecl (exp);
11325 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11327 error ("first argument of %qD must be a pointer, second integer constant",
11328 fndecl);
11329 expand_builtin_trap ();
11330 return const0_rtx;
11333 ost = CALL_EXPR_ARG (exp, 1);
11334 STRIP_NOPS (ost);
11336 if (TREE_CODE (ost) != INTEGER_CST
11337 || tree_int_cst_sgn (ost) < 0
11338 || compare_tree_int (ost, 3) > 0)
11340 error ("last argument of %qD is not integer constant between 0 and 3",
11341 fndecl);
11342 expand_builtin_trap ();
11343 return const0_rtx;
11346 object_size_type = tree_to_shwi (ost);
11348 return object_size_type < 2 ? constm1_rtx : const0_rtx;
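/* For illustration only, not part of GCC itself: when the size could not be
   folded earlier, the expansion above yields the documented fallback
   constants for a pointer P of unknown provenance, e.g.

     __builtin_object_size (p, 0)  ->  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (types 2 and 3)  */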
11351 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11352 FCODE is the BUILT_IN_* to use.
11353 Return NULL_RTX if we failed; the caller should emit a normal call,
11354 otherwise try to get the result in TARGET, if convenient (and in
11355 mode MODE if that's convenient). */
11357 static rtx
11358 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11359 enum built_in_function fcode)
11361 if (!validate_arglist (exp,
11362 POINTER_TYPE,
11363 fcode == BUILT_IN_MEMSET_CHK
11364 ? INTEGER_TYPE : POINTER_TYPE,
11365 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11366 return NULL_RTX;
11368 tree dest = CALL_EXPR_ARG (exp, 0);
11369 tree src = CALL_EXPR_ARG (exp, 1);
11370 tree len = CALL_EXPR_ARG (exp, 2);
11371 tree size = CALL_EXPR_ARG (exp, 3);
11373 /* FIXME: Set access mode to write only for memset et al. */
11374 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11375 /*srcstr=*/NULL_TREE, size, access_read_write);
11377 if (!tree_fits_uhwi_p (size))
11378 return NULL_RTX;
11380 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11382 /* Avoid transforming the checking call to an ordinary one when
11383 an overflow has been detected or when the call couldn't be
11384 validated because the size is not constant. */
11385 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11386 return NULL_RTX;
11388 tree fn = NULL_TREE;
11389 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11390 mem{cpy,pcpy,move,set} is available. */
11391 switch (fcode)
11393 case BUILT_IN_MEMCPY_CHK:
11394 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11395 break;
11396 case BUILT_IN_MEMPCPY_CHK:
11397 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11398 break;
11399 case BUILT_IN_MEMMOVE_CHK:
11400 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11401 break;
11402 case BUILT_IN_MEMSET_CHK:
11403 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11404 break;
11405 default:
11406 break;
11409 if (! fn)
11410 return NULL_RTX;
11412 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11413 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11414 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11415 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11417 else if (fcode == BUILT_IN_MEMSET_CHK)
11418 return NULL_RTX;
11419 else
11421 unsigned int dest_align = get_pointer_alignment (dest);
11423 /* If DEST is not a pointer type, call the normal function. */
11424 if (dest_align == 0)
11425 return NULL_RTX;
11427 /* If SRC and DEST are the same (and not volatile), do nothing. */
11428 if (operand_equal_p (src, dest, 0))
11430 tree expr;
11432 if (fcode != BUILT_IN_MEMPCPY_CHK)
11434 /* Evaluate and ignore LEN in case it has side-effects. */
11435 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11436 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11439 expr = fold_build_pointer_plus (dest, len);
11440 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11443 /* __memmove_chk special case. */
11444 if (fcode == BUILT_IN_MEMMOVE_CHK)
11446 unsigned int src_align = get_pointer_alignment (src);
11448 if (src_align == 0)
11449 return NULL_RTX;
11451 /* If src is categorized for a readonly section we can use
11452 normal __memcpy_chk. */
11453 if (readonly_data_expr (src))
11455 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11456 if (!fn)
11457 return NULL_RTX;
11458 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11459 dest, src, len, size);
11460 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11461 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11462 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11465 return NULL_RTX;
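/* For illustration only, not part of GCC itself: when the length is a known
   constant that does not exceed the recorded object size, a checked call
   such as

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0));

   is expanded as a plain memcpy (d, s, 16), assuming the object size is at
   least 16 or unknown ((size_t) -1).  */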
11469 /* Emit warning if a buffer overflow is detected at compile time. */
11471 static void
11472 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11474 /* The source string. */
11475 tree srcstr = NULL_TREE;
11476 /* The size of the destination object returned by __builtin_object_size. */
11477 tree objsize = NULL_TREE;
11478 /* The string that is being concatenated with (as in __strcat_chk)
11479 or null if it isn't. */
11480 tree catstr = NULL_TREE;
11481 /* The maximum length of the source sequence in a bounded operation
11482 (such as __strncat_chk) or null if the operation isn't bounded
11483 (such as __strcat_chk). */
11484 tree maxread = NULL_TREE;
11485 /* The exact size of the access (such as in __strncpy_chk). */
11486 tree size = NULL_TREE;
11487 /* The access by the function that's checked. Except for snprintf
11488 both writing and reading are checked. */
11489 access_mode mode = access_read_write;
11491 switch (fcode)
11493 case BUILT_IN_STRCPY_CHK:
11494 case BUILT_IN_STPCPY_CHK:
11495 srcstr = CALL_EXPR_ARG (exp, 1);
11496 objsize = CALL_EXPR_ARG (exp, 2);
11497 break;
11499 case BUILT_IN_STRCAT_CHK:
11500 /* For __strcat_chk the warning will be emitted only if overflowing
11501 by at least strlen (dest) + 1 bytes. */
11502 catstr = CALL_EXPR_ARG (exp, 0);
11503 srcstr = CALL_EXPR_ARG (exp, 1);
11504 objsize = CALL_EXPR_ARG (exp, 2);
11505 break;
11507 case BUILT_IN_STRNCAT_CHK:
11508 catstr = CALL_EXPR_ARG (exp, 0);
11509 srcstr = CALL_EXPR_ARG (exp, 1);
11510 maxread = CALL_EXPR_ARG (exp, 2);
11511 objsize = CALL_EXPR_ARG (exp, 3);
11512 break;
11514 case BUILT_IN_STRNCPY_CHK:
11515 case BUILT_IN_STPNCPY_CHK:
11516 srcstr = CALL_EXPR_ARG (exp, 1);
11517 size = CALL_EXPR_ARG (exp, 2);
11518 objsize = CALL_EXPR_ARG (exp, 3);
11519 break;
11521 case BUILT_IN_SNPRINTF_CHK:
11522 case BUILT_IN_VSNPRINTF_CHK:
11523 maxread = CALL_EXPR_ARG (exp, 1);
11524 objsize = CALL_EXPR_ARG (exp, 3);
11525 /* The only checked access is the write to the destination. */
11526 mode = access_write_only;
11527 break;
11528 default:
11529 gcc_unreachable ();
11532 if (catstr && maxread)
11534 /* Check __strncat_chk. There is no way to determine the length
11535 of the string to which the source string is being appended so
11536 just warn when the length of the source string is not known. */
11537 check_strncat_sizes (exp, objsize);
11538 return;
11541 check_access (exp, size, maxread, srcstr, objsize, mode);
11544 /* Emit warning if a buffer overflow is detected at compile time
11545 in __sprintf_chk/__vsprintf_chk calls. */
11547 static void
11548 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11550 tree size, len, fmt;
11551 const char *fmt_str;
11552 int nargs = call_expr_nargs (exp);
11554 /* Verify the required arguments in the original call. */
11556 if (nargs < 4)
11557 return;
11558 size = CALL_EXPR_ARG (exp, 2);
11559 fmt = CALL_EXPR_ARG (exp, 3);
11561 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11562 return;
11564 /* Check whether the format is a literal string constant. */
11565 fmt_str = c_getstr (fmt);
11566 if (fmt_str == NULL)
11567 return;
11569 if (!init_target_chars ())
11570 return;
11572 /* If the format doesn't contain % args or %%, we know its size. */
11573 if (strchr (fmt_str, target_percent) == 0)
11574 len = build_int_cstu (size_type_node, strlen (fmt_str));
11575 /* If the format is "%s" and first ... argument is a string literal,
11576 we know it too. */
11577 else if (fcode == BUILT_IN_SPRINTF_CHK
11578 && strcmp (fmt_str, target_percent_s) == 0)
11580 tree arg;
11582 if (nargs < 5)
11583 return;
11584 arg = CALL_EXPR_ARG (exp, 4);
11585 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11586 return;
11588 len = c_strlen (arg, 1);
11589 if (!len || ! tree_fits_uhwi_p (len))
11590 return;
11592 else
11593 return;
11595 /* Add one for the terminating nul. */
11596 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11598 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11599 access_write_only);
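/* For illustration only, not part of GCC itself: with a literal format the
   output length is known and the check above can warn at compile time, e.g.

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			      "%s", "abcdef");

   writes 7 bytes (including the terminating nul) into a 4-byte object.  */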
11602 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11603 if possible. */
11605 static tree
11606 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11608 tree bytes;
11609 int object_size_type;
11611 if (!validate_arg (ptr, POINTER_TYPE)
11612 || !validate_arg (ost, INTEGER_TYPE))
11613 return NULL_TREE;
11615 STRIP_NOPS (ost);
11617 if (TREE_CODE (ost) != INTEGER_CST
11618 || tree_int_cst_sgn (ost) < 0
11619 || compare_tree_int (ost, 3) > 0)
11620 return NULL_TREE;
11622 object_size_type = tree_to_shwi (ost);
11624 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11625 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11626 and (size_t) 0 for types 2 and 3. */
11627 if (TREE_SIDE_EFFECTS (ptr))
11628 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11630 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11631 object_size_type |= OST_DYNAMIC;
11633 if (TREE_CODE (ptr) == ADDR_EXPR)
11635 compute_builtin_object_size (ptr, object_size_type, &bytes);
11636 if ((object_size_type & OST_DYNAMIC)
11637 || int_fits_type_p (bytes, size_type_node))
11638 return fold_convert (size_type_node, bytes);
11640 else if (TREE_CODE (ptr) == SSA_NAME)
11642 /* If object size is not known yet, delay folding until
11643 later. Maybe subsequent passes will help determine
11644 it. */
11645 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11646 && ((object_size_type & OST_DYNAMIC)
11647 || int_fits_type_p (bytes, size_type_node)))
11648 return fold_convert (size_type_node, bytes);
11651 return NULL_TREE;
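/* For illustration only, not part of GCC itself: for an ADDR_EXPR of a known
   object the result folds to a constant, e.g.

     char buf[16];
     size_t n = __builtin_object_size (&buf[4], 0);   // folds to 12

   while SSA_NAME pointers are left for later passes as noted above.  */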
11654 /* Builtins with folding operations that operate on "..." arguments
11655 need special handling; we need to store the arguments in a convenient
11656 data structure before attempting any folding. Fortunately there are
11657 only a few builtins that fall into this category. FNDECL is the
11658 function, and ARGS is the array of NARGS arguments to the call. */
11660 static tree
11661 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11663 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11664 tree ret = NULL_TREE;
11666 switch (fcode)
11668 case BUILT_IN_FPCLASSIFY:
11669 ret = fold_builtin_fpclassify (loc, args, nargs);
11670 break;
11672 case BUILT_IN_ADDC:
11673 case BUILT_IN_ADDCL:
11674 case BUILT_IN_ADDCLL:
11675 case BUILT_IN_SUBC:
11676 case BUILT_IN_SUBCL:
11677 case BUILT_IN_SUBCLL:
11678 return fold_builtin_addc_subc (loc, fcode, args);
11680 default:
11681 break;
11683 if (ret)
11685 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11686 SET_EXPR_LOCATION (ret, loc);
11687 suppress_warning (ret);
11688 return ret;
11690 return NULL_TREE;
11693 /* Initialize format string characters in the target charset. */
11695 bool
11696 init_target_chars (void)
11698 static bool init;
11699 if (!init)
11701 target_newline = lang_hooks.to_target_charset ('\n');
11702 target_percent = lang_hooks.to_target_charset ('%');
11703 target_c = lang_hooks.to_target_charset ('c');
11704 target_s = lang_hooks.to_target_charset ('s');
11705 if (target_newline == 0 || target_percent == 0 || target_c == 0
11706 || target_s == 0)
11707 return false;
11709 target_percent_c[0] = target_percent;
11710 target_percent_c[1] = target_c;
11711 target_percent_c[2] = '\0';
11713 target_percent_s[0] = target_percent;
11714 target_percent_s[1] = target_s;
11715 target_percent_s[2] = '\0';
11717 target_percent_s_newline[0] = target_percent;
11718 target_percent_s_newline[1] = target_s;
11719 target_percent_s_newline[2] = target_newline;
11720 target_percent_s_newline[3] = '\0';
11722 init = true;
11724 return true;
11727 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11728 and no overflow/underflow occurred. INEXACT is true if M was not
11729 exactly calculated. TYPE is the tree type for the result. This
11730 function assumes that you cleared the MPFR flags and then
11731 calculated M to see if anything subsequently set a flag prior to
11732 entering this function. Return NULL_TREE if any checks fail. */
11734 static tree
11735 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11737 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11738 overflow/underflow occurred. If -frounding-math, proceed iff the
11739 result of calling FUNC was exact. */
11740 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11741 && (!flag_rounding_math || !inexact))
11743 REAL_VALUE_TYPE rr;
11745 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11746 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11747 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11748 but the mpfr_t is not, then we underflowed in the
11749 conversion. */
11750 if (real_isfinite (&rr)
11751 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11753 REAL_VALUE_TYPE rmode;
11755 real_convert (&rmode, TYPE_MODE (type), &rr);
11756 /* Proceed iff the specified mode can hold the value. */
11757 if (real_identical (&rmode, &rr))
11758 return build_real (type, rmode);
11761 return NULL_TREE;
11764 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11765 number and no overflow/underflow occurred. INEXACT is true if M
11766 was not exactly calculated. TYPE is the tree type for the result.
11767 This function assumes that you cleared the MPFR flags and then
11768 calculated M to see if anything subsequently set a flag prior to
11769 entering this function. Return NULL_TREE if any checks fail, if
11770 FORCE_CONVERT is true, then bypass the checks. */
11772 static tree
11773 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11775 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11776 overflow/underflow occurred. If -frounding-math, proceed iff the
11777 result of calling FUNC was exact. */
11778 if (force_convert
11779 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11780 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11781 && (!flag_rounding_math || !inexact)))
11783 REAL_VALUE_TYPE re, im;
11785 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11786 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11787 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11788 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11789 but the mpfr_t is not, then we underflowed in the
11790 conversion. */
11791 if (force_convert
11792 || (real_isfinite (&re) && real_isfinite (&im)
11793 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11794 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11796 REAL_VALUE_TYPE re_mode, im_mode;
11798 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11799 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11800 /* Proceed iff the specified mode can hold the value. */
11801 if (force_convert
11802 || (real_identical (&re_mode, &re)
11803 && real_identical (&im_mode, &im)))
11804 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11805 build_real (TREE_TYPE (type), im_mode));
11808 return NULL_TREE;
11811 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11812 the pointer *(ARG_QUO) and return the result. The type is taken
11813 from the type of ARG0 and is used for setting the precision of the
11814 calculation and results. */
11816 static tree
11817 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11819 tree const type = TREE_TYPE (arg0);
11820 tree result = NULL_TREE;
11822 STRIP_NOPS (arg0);
11823 STRIP_NOPS (arg1);
11825 /* To proceed, MPFR must exactly represent the target floating point
11826 format, which only happens when the target base equals two. */
11827 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11828 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11829 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11831 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11832 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11834 if (real_isfinite (ra0) && real_isfinite (ra1))
11836 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11837 const int prec = fmt->p;
11838 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11839 tree result_rem;
11840 long integer_quo;
11841 mpfr_t m0, m1;
11843 mpfr_inits2 (prec, m0, m1, NULL);
11844 mpfr_from_real (m0, ra0, MPFR_RNDN);
11845 mpfr_from_real (m1, ra1, MPFR_RNDN);
11846 mpfr_clear_flags ();
11847 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11848 /* Remquo is independent of the rounding mode, so pass
11849 inexact=0 to do_mpfr_ckconv(). */
11850 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11851 mpfr_clears (m0, m1, NULL);
11852 if (result_rem)
11854 /* MPFR calculates quo in the host's long so it may
11855 return more bits in quo than the target int can hold
11856 if sizeof(host long) > sizeof(target int). This can
11857 happen even for native compilers in LP64 mode. In
11858 these cases, modulo the quo value with the largest
11859 number that the target int can hold while leaving one
11860 bit for the sign. */
11861 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11862 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11864 /* Dereference the quo pointer argument. */
11865 arg_quo = build_fold_indirect_ref (arg_quo);
11866 /* Proceed iff a valid pointer type was passed in. */
11867 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11869 /* Set the value. */
11870 tree result_quo
11871 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11872 build_int_cst (TREE_TYPE (arg_quo),
11873 integer_quo));
11874 TREE_SIDE_EFFECTS (result_quo) = 1;
11875 /* Combine the quo assignment with the rem. */
11876 result = fold_build2 (COMPOUND_EXPR, type,
11877 result_quo, result_rem);
11878 suppress_warning (result, OPT_Wunused_value);
11879 result = non_lvalue (result);
11884 return result;
11887 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11888 resulting value as a tree with type TYPE. The mpfr precision is
11889 set to the precision of TYPE. We assume that this mpfr function
11890 returns zero if the result could be calculated exactly within the
11891 requested precision. In addition, the integer pointer represented
11892 by ARG_SG will be dereferenced and set to the appropriate signgam
11893 (-1,1) value. */
11895 static tree
11896 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11898 tree result = NULL_TREE;
11900 STRIP_NOPS (arg);
11902 /* To proceed, MPFR must exactly represent the target floating point
11903 format, which only happens when the target base equals two. Also
11904 verify ARG is a constant and that ARG_SG is an int pointer. */
11905 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11906 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11907 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11908 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11910 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11912 /* In addition to NaN and Inf, the argument cannot be zero or a
11913 negative integer. */
11914 if (real_isfinite (ra)
11915 && ra->cl != rvc_zero
11916 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11918 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11919 const int prec = fmt->p;
11920 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11921 int inexact, sg;
11922 tree result_lg;
11924 auto_mpfr m (prec);
11925 mpfr_from_real (m, ra, MPFR_RNDN);
11926 mpfr_clear_flags ();
11927 inexact = mpfr_lgamma (m, &sg, m, rnd);
11928 result_lg = do_mpfr_ckconv (m, type, inexact);
11929 if (result_lg)
11931 tree result_sg;
11933 /* Dereference the arg_sg pointer argument. */
11934 arg_sg = build_fold_indirect_ref (arg_sg);
11935 /* Assign the signgam value into *arg_sg. */
11936 result_sg = fold_build2 (MODIFY_EXPR,
11937 TREE_TYPE (arg_sg), arg_sg,
11938 build_int_cst (TREE_TYPE (arg_sg), sg));
11939 TREE_SIDE_EFFECTS (result_sg) = 1;
11940 /* Combine the signgam assignment with the lgamma result. */
11941 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11942 result_sg, result_lg));
11947 return result;
11950 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11951 mpc function FUNC on it and return the resulting value as a tree
11952 with type TYPE. The mpfr precision is set to the precision of
11953 TYPE. We assume that function FUNC returns zero if the result
11954 could be calculated exactly within the requested precision. If
11955 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11956 in the arguments and/or results. */
11958 tree
11959 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11960 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11962 tree result = NULL_TREE;
11964 STRIP_NOPS (arg0);
11965 STRIP_NOPS (arg1);
11967 /* To proceed, MPFR must exactly represent the target floating point
11968 format, which only happens when the target base equals two. */
11969 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11970 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
11971 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11972 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
11973 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11975 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11976 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11977 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11978 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11980 if (do_nonfinite
11981 || (real_isfinite (re0) && real_isfinite (im0)
11982 && real_isfinite (re1) && real_isfinite (im1)))
11984 const struct real_format *const fmt =
11985 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11986 const int prec = fmt->p;
11987 const mpfr_rnd_t rnd = fmt->round_towards_zero
11988 ? MPFR_RNDZ : MPFR_RNDN;
11989 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11990 int inexact;
11991 mpc_t m0, m1;
11993 mpc_init2 (m0, prec);
11994 mpc_init2 (m1, prec);
11995 mpfr_from_real (mpc_realref (m0), re0, rnd);
11996 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11997 mpfr_from_real (mpc_realref (m1), re1, rnd);
11998 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11999 mpfr_clear_flags ();
12000 inexact = func (m0, m0, m1, crnd);
12001 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12002 mpc_clear (m0);
12003 mpc_clear (m1);
12007 return result;
12010 /* A wrapper function for builtin folding that prevents warnings for
12011 "statement without effect" and the like, caused by removing the
12012 call node earlier than the warning is generated. */
12014 tree
12015 fold_call_stmt (gcall *stmt, bool ignore)
12017 tree ret = NULL_TREE;
12018 tree fndecl = gimple_call_fndecl (stmt);
12019 location_t loc = gimple_location (stmt);
12020 if (fndecl && fndecl_built_in_p (fndecl)
12021 && !gimple_call_va_arg_pack_p (stmt))
12023 int nargs = gimple_call_num_args (stmt);
12024 tree *args = (nargs > 0
12025 ? gimple_call_arg_ptr (stmt, 0)
12026 : &error_mark_node);
12028 if (avoid_folding_inline_builtin (fndecl))
12029 return NULL_TREE;
12030 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12032 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12034 else
12036 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12037 if (ret)
12039 /* Propagate location information from original call to
12040 expansion of builtin. Otherwise things like
12041 maybe_emit_chk_warning, that operate on the expansion
12042 of a builtin, will use the wrong location information. */
12043 if (gimple_has_location (stmt))
12045 tree realret = ret;
12046 if (TREE_CODE (ret) == NOP_EXPR)
12047 realret = TREE_OPERAND (ret, 0);
12048 if (CAN_HAVE_LOCATION_P (realret)
12049 && !EXPR_HAS_LOCATION (realret))
12050 SET_EXPR_LOCATION (realret, loc);
12051 return realret;
12053 return ret;
12057 return NULL_TREE;
12060 /* Look up the function in builtin_decl that corresponds to DECL
12061 and set ASMSPEC as its user assembler name. DECL must be a
12062 function decl that declares a builtin. */
12064 void
12065 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12067 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12068 && asmspec != 0);
12070 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12071 set_user_assembler_name (builtin, asmspec);
12073 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12074 && INT_TYPE_SIZE < BITS_PER_WORD)
12076 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12077 set_user_assembler_libfunc ("ffs", asmspec);
12078 set_optab_libfunc (ffs_optab, mode, "ffs");
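/* For illustration only, not part of GCC itself: this path is taken for user
   code that redirects a builtin to a different assembler name, e.g.

     extern int ffs (int) __asm__ ("my_ffs");   // my_ffs is a hypothetical name

   after which calls to the builtin (and, as above, the ffs libfunc when int
   is narrower than a word) use that symbol.  */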
12082 /* Return true if DECL is a builtin that expands to a constant or similarly
12083 simple code. */
12084 bool
12085 is_simple_builtin (tree decl)
12087 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12088 switch (DECL_FUNCTION_CODE (decl))
12090 /* Builtins that expand to constants. */
12091 case BUILT_IN_CONSTANT_P:
12092 case BUILT_IN_EXPECT:
12093 case BUILT_IN_OBJECT_SIZE:
12094 case BUILT_IN_UNREACHABLE:
12095 /* Simple register moves or loads from stack. */
12096 case BUILT_IN_ASSUME_ALIGNED:
12097 case BUILT_IN_RETURN_ADDRESS:
12098 case BUILT_IN_EXTRACT_RETURN_ADDR:
12099 case BUILT_IN_FROB_RETURN_ADDR:
12100 case BUILT_IN_RETURN:
12101 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12102 case BUILT_IN_FRAME_ADDRESS:
12103 case BUILT_IN_VA_END:
12104 case BUILT_IN_STACK_SAVE:
12105 case BUILT_IN_STACK_RESTORE:
12106 case BUILT_IN_DWARF_CFA:
12107 /* Exception state returns or moves registers around. */
12108 case BUILT_IN_EH_FILTER:
12109 case BUILT_IN_EH_POINTER:
12110 case BUILT_IN_EH_COPY_VALUES:
12111 return true;
12113 default:
12114 return false;
12117 return false;
12120 /* Return true if DECL is a builtin that is not expensive, i.e. it is
12121 most probably expanded inline into reasonably simple code. This is a
12122 superset of is_simple_builtin. */
12123 bool
12124 is_inexpensive_builtin (tree decl)
12126 if (!decl)
12127 return false;
12128 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12129 return true;
12130 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12131 switch (DECL_FUNCTION_CODE (decl))
12133 case BUILT_IN_ABS:
12134 CASE_BUILT_IN_ALLOCA:
12135 case BUILT_IN_BSWAP16:
12136 case BUILT_IN_BSWAP32:
12137 case BUILT_IN_BSWAP64:
12138 case BUILT_IN_BSWAP128:
12139 case BUILT_IN_CLZ:
12140 case BUILT_IN_CLZIMAX:
12141 case BUILT_IN_CLZL:
12142 case BUILT_IN_CLZLL:
12143 case BUILT_IN_CTZ:
12144 case BUILT_IN_CTZIMAX:
12145 case BUILT_IN_CTZL:
12146 case BUILT_IN_CTZLL:
12147 case BUILT_IN_FFS:
12148 case BUILT_IN_FFSIMAX:
12149 case BUILT_IN_FFSL:
12150 case BUILT_IN_FFSLL:
12151 case BUILT_IN_IMAXABS:
12152 case BUILT_IN_FINITE:
12153 case BUILT_IN_FINITEF:
12154 case BUILT_IN_FINITEL:
12155 case BUILT_IN_FINITED32:
12156 case BUILT_IN_FINITED64:
12157 case BUILT_IN_FINITED128:
12158 case BUILT_IN_FPCLASSIFY:
12159 case BUILT_IN_ISFINITE:
12160 case BUILT_IN_ISINF_SIGN:
12161 case BUILT_IN_ISINF:
12162 case BUILT_IN_ISINFF:
12163 case BUILT_IN_ISINFL:
12164 case BUILT_IN_ISINFD32:
12165 case BUILT_IN_ISINFD64:
12166 case BUILT_IN_ISINFD128:
12167 case BUILT_IN_ISNAN:
12168 case BUILT_IN_ISNANF:
12169 case BUILT_IN_ISNANL:
12170 case BUILT_IN_ISNAND32:
12171 case BUILT_IN_ISNAND64:
12172 case BUILT_IN_ISNAND128:
12173 case BUILT_IN_ISNORMAL:
12174 case BUILT_IN_ISGREATER:
12175 case BUILT_IN_ISGREATEREQUAL:
12176 case BUILT_IN_ISLESS:
12177 case BUILT_IN_ISLESSEQUAL:
12178 case BUILT_IN_ISLESSGREATER:
12179 case BUILT_IN_ISUNORDERED:
12180 case BUILT_IN_ISEQSIG:
12181 case BUILT_IN_VA_ARG_PACK:
12182 case BUILT_IN_VA_ARG_PACK_LEN:
12183 case BUILT_IN_VA_COPY:
12184 case BUILT_IN_TRAP:
12185 case BUILT_IN_UNREACHABLE_TRAP:
12186 case BUILT_IN_SAVEREGS:
12187 case BUILT_IN_POPCOUNTL:
12188 case BUILT_IN_POPCOUNTLL:
12189 case BUILT_IN_POPCOUNTIMAX:
12190 case BUILT_IN_POPCOUNT:
12191 case BUILT_IN_PARITYL:
12192 case BUILT_IN_PARITYLL:
12193 case BUILT_IN_PARITYIMAX:
12194 case BUILT_IN_PARITY:
12195 case BUILT_IN_LABS:
12196 case BUILT_IN_LLABS:
12197 case BUILT_IN_PREFETCH:
12198 case BUILT_IN_ACC_ON_DEVICE:
12199 return true;
12201 default:
12202 return is_simple_builtin (decl);
12205 return false;
12208 /* Return true if T is a constant and the value cast to a target char
12209 can be represented by a host char.
12210 Store the casted char constant in *P if so. */
12212 bool
12213 target_char_cst_p (tree t, char *p)
12215 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12216 return false;
12218 *p = (char)tree_to_uhwi (t);
12219 return true;
12222 /* Return true if the builtin DECL is implemented in a standard library.
12223 Otherwise return false, which doesn't guarantee that it is not (thus the list
12224 of handled builtins below may be incomplete). */
12226 bool
12227 builtin_with_linkage_p (tree decl)
12229 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12230 switch (DECL_FUNCTION_CODE (decl))
12232 CASE_FLT_FN (BUILT_IN_ACOS):
12233 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
12234 CASE_FLT_FN (BUILT_IN_ACOSH):
12235 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
12236 CASE_FLT_FN (BUILT_IN_ASIN):
12237 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
12238 CASE_FLT_FN (BUILT_IN_ASINH):
12239 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
12240 CASE_FLT_FN (BUILT_IN_ATAN):
12241 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
12242 CASE_FLT_FN (BUILT_IN_ATANH):
12243 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
12244 CASE_FLT_FN (BUILT_IN_ATAN2):
12245 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
12246 CASE_FLT_FN (BUILT_IN_CBRT):
12247 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
12248 CASE_FLT_FN (BUILT_IN_CEIL):
12249 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12250 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12251 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12252 CASE_FLT_FN (BUILT_IN_COS):
12253 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
12254 CASE_FLT_FN (BUILT_IN_COSH):
12255 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
12256 CASE_FLT_FN (BUILT_IN_ERF):
12257 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
12258 CASE_FLT_FN (BUILT_IN_ERFC):
12259 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
12260 CASE_FLT_FN (BUILT_IN_EXP):
12261 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
12262 CASE_FLT_FN (BUILT_IN_EXP2):
12263 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
12264 CASE_FLT_FN (BUILT_IN_EXPM1):
12265 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
12266 CASE_FLT_FN (BUILT_IN_FABS):
12267 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12268 CASE_FLT_FN (BUILT_IN_FDIM):
12269 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
12270 CASE_FLT_FN (BUILT_IN_FLOOR):
12271 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12272 CASE_FLT_FN (BUILT_IN_FMA):
12273 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12274 CASE_FLT_FN (BUILT_IN_FMAX):
12275 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12276 CASE_FLT_FN (BUILT_IN_FMIN):
12277 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12278 CASE_FLT_FN (BUILT_IN_FMOD):
12279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
12280 CASE_FLT_FN (BUILT_IN_FREXP):
12281 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
12282 CASE_FLT_FN (BUILT_IN_HYPOT):
12283 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
12284 CASE_FLT_FN (BUILT_IN_ILOGB):
12285 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
12286 CASE_FLT_FN (BUILT_IN_LDEXP):
12287 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
12288 CASE_FLT_FN (BUILT_IN_LGAMMA):
12289 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
12290 CASE_FLT_FN (BUILT_IN_LLRINT):
12291 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
12292 CASE_FLT_FN (BUILT_IN_LLROUND):
12293 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
12294 CASE_FLT_FN (BUILT_IN_LOG):
12295 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
12296 CASE_FLT_FN (BUILT_IN_LOG10):
12297 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
12298 CASE_FLT_FN (BUILT_IN_LOG1P):
12299 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
12300 CASE_FLT_FN (BUILT_IN_LOG2):
12301 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
12302 CASE_FLT_FN (BUILT_IN_LOGB):
12303 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
12304 CASE_FLT_FN (BUILT_IN_LRINT):
12305 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
12306 CASE_FLT_FN (BUILT_IN_LROUND):
12307 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
12308 CASE_FLT_FN (BUILT_IN_MODF):
12309 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
12310 CASE_FLT_FN (BUILT_IN_NAN):
12311 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
12312 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12313 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12314 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12315 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
12316 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12317 CASE_FLT_FN (BUILT_IN_POW):
12318 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
12319 CASE_FLT_FN (BUILT_IN_REMAINDER):
12320 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
12321 CASE_FLT_FN (BUILT_IN_REMQUO):
12322 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
12323 CASE_FLT_FN (BUILT_IN_RINT):
12324 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12325 CASE_FLT_FN (BUILT_IN_ROUND):
12326 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12327 CASE_FLT_FN (BUILT_IN_SCALBLN):
12328 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
12329 CASE_FLT_FN (BUILT_IN_SCALBN):
12330 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
12331 CASE_FLT_FN (BUILT_IN_SIN):
12332 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
12333 CASE_FLT_FN (BUILT_IN_SINH):
12334 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
12335 CASE_FLT_FN (BUILT_IN_SINCOS):
12336 CASE_FLT_FN (BUILT_IN_SQRT):
12337 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12338 CASE_FLT_FN (BUILT_IN_TAN):
12339 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
12340 CASE_FLT_FN (BUILT_IN_TANH):
12341 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
12342 CASE_FLT_FN (BUILT_IN_TGAMMA):
12343 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
12344 CASE_FLT_FN (BUILT_IN_TRUNC):
12345 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12346 return true;
12348 case BUILT_IN_STPCPY:
12349 case BUILT_IN_STPNCPY:
12350 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
12351 by libiberty's stpcpy.c for MinGW targets, so we need to return true
12352 in order to be able to build libiberty in LTO mode for them. */
12353 return true;
12355 default:
12356 break;
12357 }
12358 return false;
12359 }
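/* A minimal user-level illustration (stand-alone example, not part of
   builtins.cc) of why these builtins need an out-of-line definition: GCC
   may expand a direct sqrt (x) call via the builtin, but taking the
   function's address defeats that, so the libm symbol listed above must
   exist at link time.  */

#include <math.h>

/* Initializing a function pointer forces a reference to the library
   symbol rather than the inline-expanded builtin.  */
double (*indirect_sqrt) (double) = sqrt;

double
call_through_pointer (double x)
{
  /* An indirect call cannot be expanded as a builtin.  */
  return indirect_sqrt (x);
}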
12361 /* Return true if OFFRNG is bounded to a subrange of offset values
12362 valid for the largest possible object. */
12364 bool
12365 access_ref::offset_bounded () const
12366 {
12367 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12368 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12369 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12370 }
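/* The builtin_fnspec function that follows maps builtins to fnspec
   strings.  As documented in gcc/attr-fnspec.h, the first two characters
   describe the return value and the function as a whole, and each
   following pair of characters describes one argument.  The stand-alone
   helper below (hypothetical, not part of GCC) only splits a string into
   those groups; the meaning of the individual letters is defined in
   attr-fnspec.h.  */

#include <cstddef>
#include <string>
#include <vector>

struct fnspec_layout
{
  std::string prefix;              /* characters 0 and 1 */
  std::vector<std::string> args;   /* one two-character group per argument */
};

static fnspec_layout
split_fnspec (const std::string &spec)
{
  fnspec_layout out;
  /* The first two characters cover the return value and the function.  */
  out.prefix = spec.substr (0, 2);
  /* Every following pair of characters describes one argument.  */
  for (std::size_t i = 2; i + 1 < spec.size (); i += 2)
    out.args.push_back (spec.substr (i, 2));
  return out;
}

/* For example, split_fnspec ("1cW 13") yields the prefix "1c" plus the
   groups "W " and "13" for strncat's two pointer arguments; the scalar
   length argument carries no group of its own.  */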
12372 /* Return the fnspec string describing the known side effects of builtin CALLEE,
12373 or the empty string if nothing is known.  See tree-ssa-structalias.cc:find_func_aliases
12374 for the list of builtins we might need to handle here. */
12376 attr_fnspec
12377 builtin_fnspec (tree callee)
12378 {
12379 built_in_function code = DECL_FUNCTION_CODE (callee);
12381 switch (code)
12382 {
12383 /* All the following functions read memory pointed to by their
12384 second argument and write memory pointed to by the first
12385 argument.
12386 strcat/strncat additionally read memory pointed to by the first
12387 argument. */
12388 case BUILT_IN_STRCAT:
12389 case BUILT_IN_STRCAT_CHK:
12390 return "1cW 1 ";
12391 case BUILT_IN_STRNCAT:
12392 case BUILT_IN_STRNCAT_CHK:
12393 return "1cW 13";
12394 case BUILT_IN_STRCPY:
12395 case BUILT_IN_STRCPY_CHK:
12396 return "1cO 1 ";
12397 case BUILT_IN_STPCPY:
12398 case BUILT_IN_STPCPY_CHK:
12399 return ".cO 1 ";
12400 case BUILT_IN_STRNCPY:
12401 case BUILT_IN_MEMCPY:
12402 case BUILT_IN_MEMMOVE:
12403 case BUILT_IN_TM_MEMCPY:
12404 case BUILT_IN_TM_MEMMOVE:
12405 case BUILT_IN_STRNCPY_CHK:
12406 case BUILT_IN_MEMCPY_CHK:
12407 case BUILT_IN_MEMMOVE_CHK:
12408 return "1cO313";
12409 case BUILT_IN_MEMPCPY:
12410 case BUILT_IN_MEMPCPY_CHK:
12411 return ".cO313";
12412 case BUILT_IN_STPNCPY:
12413 case BUILT_IN_STPNCPY_CHK:
12414 return ".cO313";
12415 case BUILT_IN_BCOPY:
12416 return ".c23O3";
12417 case BUILT_IN_BZERO:
12418 return ".cO2";
12419 case BUILT_IN_MEMCMP:
12420 case BUILT_IN_MEMCMP_EQ:
12421 case BUILT_IN_BCMP:
12422 case BUILT_IN_STRNCMP:
12423 case BUILT_IN_STRNCMP_EQ:
12424 case BUILT_IN_STRNCASECMP:
12425 return ".cR3R3";
12427 /* The following functions read memory pointed to by their
12428 first argument. */
12429 CASE_BUILT_IN_TM_LOAD (1):
12430 CASE_BUILT_IN_TM_LOAD (2):
12431 CASE_BUILT_IN_TM_LOAD (4):
12432 CASE_BUILT_IN_TM_LOAD (8):
12433 CASE_BUILT_IN_TM_LOAD (FLOAT):
12434 CASE_BUILT_IN_TM_LOAD (DOUBLE):
12435 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12436 CASE_BUILT_IN_TM_LOAD (M64):
12437 CASE_BUILT_IN_TM_LOAD (M128):
12438 CASE_BUILT_IN_TM_LOAD (M256):
12439 case BUILT_IN_TM_LOG:
12440 case BUILT_IN_TM_LOG_1:
12441 case BUILT_IN_TM_LOG_2:
12442 case BUILT_IN_TM_LOG_4:
12443 case BUILT_IN_TM_LOG_8:
12444 case BUILT_IN_TM_LOG_FLOAT:
12445 case BUILT_IN_TM_LOG_DOUBLE:
12446 case BUILT_IN_TM_LOG_LDOUBLE:
12447 case BUILT_IN_TM_LOG_M64:
12448 case BUILT_IN_TM_LOG_M128:
12449 case BUILT_IN_TM_LOG_M256:
12450 return ".cR ";
12452 case BUILT_IN_INDEX:
12453 case BUILT_IN_RINDEX:
12454 case BUILT_IN_STRCHR:
12455 case BUILT_IN_STRLEN:
12456 case BUILT_IN_STRRCHR:
12457 return ".cR ";
12458 case BUILT_IN_STRNLEN:
12459 return ".cR2";
12461 /* These read memory pointed to by the first argument.
12462 Allocating memory does not have any side-effects apart from
12463 being the definition point for the pointer.
12464 Unix98 specifies that errno is set on allocation failure. */
12465 case BUILT_IN_STRDUP:
12466 return "mCR ";
12467 case BUILT_IN_STRNDUP:
12468 return "mCR2";
12469 /* Allocating memory does not have any side-effects apart from
12470 being the definition point for the pointer. */
12471 case BUILT_IN_MALLOC:
12472 case BUILT_IN_ALIGNED_ALLOC:
12473 case BUILT_IN_CALLOC:
12474 case BUILT_IN_GOMP_ALLOC:
12475 return "mC";
12476 CASE_BUILT_IN_ALLOCA:
12477 return "mc";
12478 /* These read memory pointed to by the first argument, with the size
12479 given by the third argument. */
12480 case BUILT_IN_MEMCHR:
12481 return ".cR3";
12482 /* These read memory pointed to by the first and second arguments. */
12483 case BUILT_IN_STRSTR:
12484 case BUILT_IN_STRPBRK:
12485 case BUILT_IN_STRCASECMP:
12486 case BUILT_IN_STRCSPN:
12487 case BUILT_IN_STRSPN:
12488 case BUILT_IN_STRCMP:
12489 case BUILT_IN_STRCMP_EQ:
12490 return ".cR R ";
12491 /* Freeing memory kills the pointed-to memory. More importantly,
12492 the call has to serve as a barrier for moving loads and stores
12493 across it. */
12494 case BUILT_IN_STACK_RESTORE:
12495 case BUILT_IN_FREE:
12496 case BUILT_IN_GOMP_FREE:
12497 return ".co ";
12498 case BUILT_IN_VA_END:
12499 return ".cO ";
12500 /* Realloc serves both as allocation point and deallocation point. */
12501 case BUILT_IN_REALLOC:
12502 case BUILT_IN_GOMP_REALLOC:
12503 return ".Cw ";
12504 case BUILT_IN_GAMMA_R:
12505 case BUILT_IN_GAMMAF_R:
12506 case BUILT_IN_GAMMAL_R:
12507 case BUILT_IN_LGAMMA_R:
12508 case BUILT_IN_LGAMMAF_R:
12509 case BUILT_IN_LGAMMAL_R:
12510 return ".C. Ot";
12511 case BUILT_IN_FREXP:
12512 case BUILT_IN_FREXPF:
12513 case BUILT_IN_FREXPL:
12514 case BUILT_IN_MODF:
12515 case BUILT_IN_MODFF:
12516 case BUILT_IN_MODFL:
12517 return ".c. Ot";
12518 case BUILT_IN_REMQUO:
12519 case BUILT_IN_REMQUOF:
12520 case BUILT_IN_REMQUOL:
12521 return ".c. . Ot";
12522 case BUILT_IN_SINCOS:
12523 case BUILT_IN_SINCOSF:
12524 case BUILT_IN_SINCOSL:
12525 return ".c. OtOt";
12526 case BUILT_IN_MEMSET:
12527 case BUILT_IN_MEMSET_CHK:
12528 case BUILT_IN_TM_MEMSET:
12529 return "1cO3";
12530 CASE_BUILT_IN_TM_STORE (1):
12531 CASE_BUILT_IN_TM_STORE (2):
12532 CASE_BUILT_IN_TM_STORE (4):
12533 CASE_BUILT_IN_TM_STORE (8):
12534 CASE_BUILT_IN_TM_STORE (FLOAT):
12535 CASE_BUILT_IN_TM_STORE (DOUBLE):
12536 CASE_BUILT_IN_TM_STORE (LDOUBLE):
12537 CASE_BUILT_IN_TM_STORE (M64):
12538 CASE_BUILT_IN_TM_STORE (M128):
12539 CASE_BUILT_IN_TM_STORE (M256):
12540 return ".cO ";
12541 case BUILT_IN_STACK_SAVE:
12542 case BUILT_IN_RETURN:
12543 case BUILT_IN_EH_POINTER:
12544 case BUILT_IN_EH_FILTER:
12545 case BUILT_IN_UNWIND_RESUME:
12546 case BUILT_IN_CXA_END_CLEANUP:
12547 case BUILT_IN_EH_COPY_VALUES:
12548 case BUILT_IN_FRAME_ADDRESS:
12549 case BUILT_IN_APPLY_ARGS:
12550 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12551 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12552 case BUILT_IN_PREFETCH:
12553 case BUILT_IN_DWARF_CFA:
12554 case BUILT_IN_RETURN_ADDRESS:
12555 return ".c";
12556 case BUILT_IN_ASSUME_ALIGNED:
12557 case BUILT_IN_EXPECT:
12558 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12559 return "1cX ";
12560 /* But posix_memalign stores a pointer into the memory pointed to
12561 by its first argument. */
12562 case BUILT_IN_POSIX_MEMALIGN:
12563 return ".cOt";
12565 default:
12566 return "";