/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
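
/* For illustration, the behavior on a few hypothetical names (these
   particular calls appear nowhere in this file):

     is_builtin_name ("__builtin_memcpy")  -> true
     is_builtin_name ("__atomic_load_n")   -> true
     is_builtin_name ("memcpy")            -> false  */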
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
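
/* A worked example of the bitpos adjustment above, with illustrative
   numbers rather than any particular target's: if get_object_alignment_1
   reports align == 128 and bitpos == 24, every candidate address has the
   form 128*k + 24 bits.  Since 128*k + 24 == 8 * (16*k + 3) and 16*k + 3
   is odd, the largest power of two dividing all such addresses is
   least_bit_hwi (24) == 8 bits, i.e. plain byte alignment.  */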
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
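
/* The POINTER_PLUS_EXPR case above composes in the obvious way.  As a
   hypothetical example: if the inner pointer P is known to be 16-byte
   aligned (align == 128, bitpos == 0), then for P + 4 the constant
   offset contributes 4 * BITS_PER_UNIT == 32 to bitpos, and
   get_pointer_alignment returns least_bit_hwi (32) == 32 bits, i.e.
   4-byte alignment.  */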
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
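
/* Two illustrative cases (assuming 8-bit bytes; these calls are not
   made anywhere as such):

     string_length ("ab\0cd", 1, 5) == 2

   and, for a 4-byte wide string L"hi" laid out little-endian as
   "h\0\0\0" "i\0\0\0" "\0\0\0\0",

     string_length (ptr, 4, 3) == 2

   since the third element compares equal to four NUL bytes.  */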
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
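
/* Some concrete outcomes of the logic above for a string constant
   "foo\0bar" of size 8 (illustrative only):

     - with a known zero offset, the scan finds the first NUL at index 3
       and ssize_int (3) is returned;
     - with the known offset 4, the scan starts at "bar" and also yields
       ssize_int (3);
     - with an unknown (non-constant) offset, the embedded NUL makes the
       result offset-dependent, so NULL_TREE is returned.  */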
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
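
/* A worked example, assuming a 32-bit SImode with 8-bit units (numbers
   purely for illustration): c_readstr ("abc", SImode) reads the bytes
   0x61 0x62 0x63 0x00 and yields the constant 0x00636261 on a
   little-endian target or 0x61626300 on a big-endian one.  Because CH
   sticks at zero once a NUL is seen (when NULL_TERMINATED_P), the call
   c_readstr ("a\0bc", SImode) reads 0x61 0x00 0x00 0x00, not the 'b'
   and 'c' that follow.  */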
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
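
/* Behavior sketch with made-up sizes: for an 8-bit target char, an
   INTEGER_CST of 65 stores 'A' in *P and returns 0.  If the target
   char were wider than the host char (say 16 vs. 8 bits), a value such
   as 0x1234 would survive the CHAR_TYPE_SIZE mask but not the
   HOST_BITS_PER_CHAR mask (0x34 != 0x1234), so 1 is returned and *P is
   left unset.  */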
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
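
/* At the source level this expansion backs calls such as the following
   usage sketch (not code from this file):

     void *ret = __builtin_return_address (0);   current function's caller
     void *fp  = __builtin_frame_address (1);    caller's frame

   which is why COUNT == 0 needs only the soft frame pointer for
   __builtin_return_address but the hard frame pointer for
   __builtin_frame_address, as explained above.  */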
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
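
/* Typical uses from this file: expand_builtin_nonlocal_goto below checks

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   for an exact two-pointer signature, while expand_builtin_prefetch
   checks

     validate_arglist (exp, POINTER_TYPE, 0)

   where the terminating 0 accepts any further arguments, matching an
   ellipsis.  */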
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>;"
	       " using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
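
/* The source forms this expands look like the following sketch (the
   pointer P is hypothetical):

     __builtin_prefetch (p);         read prefetch, locality 3 (defaults)
     __builtin_prefetch (p, 1, 0);   write prefetch, no temporal locality

   Out-of-range read/write or locality arguments are diagnosed above and
   then degraded to zero rather than rejected.  */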
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
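
/* The offset bookkeeping above is plain round-up arithmetic.  With
   illustrative numbers: if SIZE is 4 when a register of mode alignment
   8 and size 8 is reached, CEIL (4, 8) * 8 rounds the offset up to 8,
   the register occupies bytes [8, 16), and SIZE ends up as 16.  */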
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1699 /* Perform an untyped call and save the state required to perform an
1700 untyped return of whatever value was returned by the given function. */
1702 static rtx
1703 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1705 int size, align, regno;
1706 fixed_size_mode mode;
1707 rtx incoming_args, result, reg, dest, src;
1708 rtx_call_insn *call_insn;
1709 rtx old_stack_level = 0;
1710 rtx call_fusage = 0;
1711 rtx struct_value
1712 = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1713 arguments = convert_memory_address (Pmode, arguments);
1715 /* Create a block where the return registers can be saved. */
1716 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1718 /* Fetch the arg pointer from the ARGUMENTS block. */
1719 incoming_args = gen_reg_rtx (Pmode);
1720 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1721 if (!STACK_GROWS_DOWNWARD)
1722 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1723 incoming_args, 0, OPTAB_LIB_WIDEN);
1725 /* Push a new argument block and copy the arguments. Do not allow
1726 the (potential) memcpy call below to interfere with our stack
1727 manipulations. */
1728 do_pending_stack_adjust ();
1729 NO_DEFER_POP;
1731 /* Save the stack with nonlocal if available. */
1732 if (targetm.have_save_stack_nonlocal ())
1733 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1734 else
1735 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1737 /* Allocate a block of memory onto the stack and copy the memory
1738 arguments to the outgoing arguments address. We can pass TRUE
1739 as the 4th argument because we just saved the stack pointer
1740 and will restore it right after the call. */
1741 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1743 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1744 may have already set current_function_calls_alloca to true.
1745 current_function_calls_alloca won't be set if argsize is zero,
1746 so we have to guarantee need_drap is true here. */
1747 if (SUPPORTS_STACK_ALIGNMENT)
1748 crtl->need_drap = true;
1750 dest = virtual_outgoing_args_rtx;
1751 if (!STACK_GROWS_DOWNWARD)
1753 if (CONST_INT_P (argsize))
1754 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1755 else
1756 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1758 dest = gen_rtx_MEM (BLKmode, dest);
1759 set_mem_align (dest, PARM_BOUNDARY);
1760 src = gen_rtx_MEM (BLKmode, incoming_args);
1761 set_mem_align (src, PARM_BOUNDARY);
1762 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1764 /* Refer to the argument block. */
1765 apply_args_size ();
1766 arguments = gen_rtx_MEM (BLKmode, arguments);
1767 set_mem_align (arguments, PARM_BOUNDARY);
1769 /* Walk past the arg-pointer and structure value address. */
1770 size = GET_MODE_SIZE (Pmode);
1771 if (struct_value)
1772 size += GET_MODE_SIZE (Pmode);
1774 /* Restore each of the registers previously saved. Make USE insns
1775 for each of these registers for use in making the call. */
1776 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1777 if ((mode = apply_args_mode[regno]) != VOIDmode)
1779 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1780 if (size % align != 0)
1781 size = CEIL (size, align) * align;
1782 reg = gen_rtx_REG (mode, regno);
1783 emit_move_insn (reg, adjust_address (arguments, mode, size));
1784 use_reg (&call_fusage, reg);
1785 size += GET_MODE_SIZE (mode);
1788 /* Restore the structure value address unless this is passed as an
1789 "invisible" first argument. */
1790 size = GET_MODE_SIZE (Pmode);
1791 if (struct_value)
1793 rtx value = gen_reg_rtx (Pmode);
1794 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1795 emit_move_insn (struct_value, value);
1796 if (REG_P (struct_value))
1797 use_reg (&call_fusage, struct_value);
1800 /* All arguments and registers used for the call are set up by now! */
1801 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1803 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1804 and we don't want to load it into a register as an optimization,
1805 because prepare_call_address already did it if it should be done. */
1806 if (GET_CODE (function) != SYMBOL_REF)
1807 function = memory_address (FUNCTION_MODE, function);
1809 /* Generate the actual call instruction and save the return value. */
1810 if (targetm.have_untyped_call ())
1812 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1813 emit_call_insn (targetm.gen_untyped_call (mem, result,
1814 result_vector (1, result)));
1816 else if (targetm.have_call_value ())
1818 rtx valreg = 0;
1820 /* Locate the unique return register. It is not possible to
1821 express a call that sets more than one return register using
1822 call_value; use untyped_call for that. In fact, untyped_call
1823 only needs to save the return registers in the given block. */
1824 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1825 if ((mode = apply_result_mode[regno]) != VOIDmode)
1827 gcc_assert (!valreg); /* have_untyped_call required. */
1829 valreg = gen_rtx_REG (mode, regno);
1832 emit_insn (targetm.gen_call_value (valreg,
1833 gen_rtx_MEM (FUNCTION_MODE, function),
1834 const0_rtx, NULL_RTX, const0_rtx));
1836 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1838 else
1839 gcc_unreachable ();
1841 /* Find the CALL insn we just emitted, and attach the register usage
1842 information. */
1843 call_insn = last_call_insn ();
1844 add_function_usage_to (call_insn, call_fusage);
1846 /* Restore the stack. */
1847 if (targetm.have_save_stack_nonlocal ())
1848 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1849 else
1850 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1851 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1853 OK_DEFER_POP;
1855 /* Return the address of the result block. */
1856 result = copy_addr_to_reg (XEXP (result, 0));
1857 return convert_memory_address (ptr_mode, result);
1860 /* Perform an untyped return. */
1862 static void
1863 expand_builtin_return (rtx result)
1865 int size, align, regno;
1866 fixed_size_mode mode;
1867 rtx reg;
1868 rtx_insn *call_fusage = 0;
1870 result = convert_memory_address (Pmode, result);
1872 apply_result_size ();
1873 result = gen_rtx_MEM (BLKmode, result);
1875 if (targetm.have_untyped_return ())
1877 rtx vector = result_vector (0, result);
1878 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1879 emit_barrier ();
1880 return;
1883 /* Restore the return value and note that each value is used. */
1884 size = 0;
1885 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1886 if ((mode = apply_result_mode[regno]) != VOIDmode)
1888 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1889 if (size % align != 0)
1890 size = CEIL (size, align) * align;
1891 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1892 emit_move_insn (reg, adjust_address (result, mode, size));
1894 push_to_sequence (call_fusage);
1895 emit_use (reg);
1896 call_fusage = get_insns ();
1897 end_sequence ();
1898 size += GET_MODE_SIZE (mode);
1901 /* Put the USE insns before the return. */
1902 emit_insn (call_fusage);
1904 /* Return whatever value was restored by jumping directly to the end
1905 of the function. */
1906 expand_naked_return ();
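/* Illustrative sketch (not part of GCC): expand_builtin_apply_args,
   expand_builtin_apply and expand_builtin_return above together
   implement transparent call forwarding.  Assuming a hypothetical
   callee `target_fn' and 128 as an assumed upper bound on the bytes
   of stacked arguments, a forwarding wrapper could be written as:

     extern void target_fn ();

     void wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (target_fn, args, 128);
       __builtin_return (result);
     }

   __builtin_apply_args captures the incoming argument registers,
   __builtin_apply replays them into a new call, and __builtin_return
   hands back whatever value block that call produced.  */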
1909 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1911 static enum type_class
1912 type_to_class (tree type)
1914 switch (TREE_CODE (type))
1916 case VOID_TYPE: return void_type_class;
1917 case INTEGER_TYPE: return integer_type_class;
1918 case ENUMERAL_TYPE: return enumeral_type_class;
1919 case BOOLEAN_TYPE: return boolean_type_class;
1920 case POINTER_TYPE: return pointer_type_class;
1921 case REFERENCE_TYPE: return reference_type_class;
1922 case OFFSET_TYPE: return offset_type_class;
1923 case REAL_TYPE: return real_type_class;
1924 case COMPLEX_TYPE: return complex_type_class;
1925 case FUNCTION_TYPE: return function_type_class;
1926 case METHOD_TYPE: return method_type_class;
1927 case RECORD_TYPE: return record_type_class;
1928 case UNION_TYPE:
1929 case QUAL_UNION_TYPE: return union_type_class;
1930 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1931 ? string_type_class : array_type_class);
1932 case LANG_TYPE: return lang_type_class;
1933 default: return no_type_class;
1937 /* Expand a call EXP to __builtin_classify_type. */
1939 static rtx
1940 expand_builtin_classify_type (tree exp)
1942 if (call_expr_nargs (exp))
1943 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1944 return GEN_INT (no_type_class);
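/* For example (illustrative):

     __builtin_classify_type (42)          => integer_type_class
     __builtin_classify_type (1.0)         => real_type_class
     __builtin_classify_type ((void *) 0)  => pointer_type_class

   The result is a compile-time constant, which is what makes the
   builtin usable in type-generic macros such as those in <tgmath.h>.  */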
1947 /* This helper macro, meant to be used in mathfn_built_in_2 below, determines
1948 which among a set of builtin math functions is appropriate for a given type
1949 mode. The `F' (float) and `L' (long double) variants are automatically
1950 generated from the 'double' case. If a function supports the _Float<N>
1951 and _Float<N>X types, there are additional variants with 'F32', 'F64',
1952 'F128', etc. suffixes. */
1953 #define CASE_MATHFN(MATHFN) \
1954 CASE_CFN_##MATHFN: \
1955 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1956 fcodel = BUILT_IN_##MATHFN##L ; break;
1957 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1958 types. */
1959 #define CASE_MATHFN_FLOATN(MATHFN) \
1960 CASE_CFN_##MATHFN: \
1961 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1962 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1963 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1964 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1965 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1966 break;
1967 /* Similar to above, but appends _R after any F/L suffix. */
1968 #define CASE_MATHFN_REENT(MATHFN) \
1969 case CFN_BUILT_IN_##MATHFN##_R: \
1970 case CFN_BUILT_IN_##MATHFN##F_R: \
1971 case CFN_BUILT_IN_##MATHFN##L_R: \
1972 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1973 fcodel = BUILT_IN_##MATHFN##L_R ; break;
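/* As an illustration, CASE_MATHFN (SIN) expands to roughly:

     case CFN_SIN:
     case CFN_BUILT_IN_SIN:
     case CFN_BUILT_IN_SINF:
     case CFN_BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one macro per math function lets the switch below map every
   variant of that function to its per-type built-in codes.  */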
1975 /* Return a function equivalent to FN but operating on floating-point
1976 values of type TYPE, or END_BUILTINS if no such function exists.
1977 This is purely an operation on function codes; it does not guarantee
1978 that the target actually has an implementation of the function. */
1980 static built_in_function
1981 mathfn_built_in_2 (tree type, combined_fn fn)
1983 tree mtype;
1984 built_in_function fcode, fcodef, fcodel;
1985 built_in_function fcodef16 = END_BUILTINS;
1986 built_in_function fcodef32 = END_BUILTINS;
1987 built_in_function fcodef64 = END_BUILTINS;
1988 built_in_function fcodef128 = END_BUILTINS;
1989 built_in_function fcodef32x = END_BUILTINS;
1990 built_in_function fcodef64x = END_BUILTINS;
1991 built_in_function fcodef128x = END_BUILTINS;
1993 switch (fn)
1995 CASE_MATHFN (ACOS)
1996 CASE_MATHFN (ACOSH)
1997 CASE_MATHFN (ASIN)
1998 CASE_MATHFN (ASINH)
1999 CASE_MATHFN (ATAN)
2000 CASE_MATHFN (ATAN2)
2001 CASE_MATHFN (ATANH)
2002 CASE_MATHFN (CBRT)
2003 CASE_MATHFN_FLOATN (CEIL)
2004 CASE_MATHFN (CEXPI)
2005 CASE_MATHFN_FLOATN (COPYSIGN)
2006 CASE_MATHFN (COS)
2007 CASE_MATHFN (COSH)
2008 CASE_MATHFN (DREM)
2009 CASE_MATHFN (ERF)
2010 CASE_MATHFN (ERFC)
2011 CASE_MATHFN (EXP)
2012 CASE_MATHFN (EXP10)
2013 CASE_MATHFN (EXP2)
2014 CASE_MATHFN (EXPM1)
2015 CASE_MATHFN (FABS)
2016 CASE_MATHFN (FDIM)
2017 CASE_MATHFN_FLOATN (FLOOR)
2018 CASE_MATHFN_FLOATN (FMA)
2019 CASE_MATHFN_FLOATN (FMAX)
2020 CASE_MATHFN_FLOATN (FMIN)
2021 CASE_MATHFN (FMOD)
2022 CASE_MATHFN (FREXP)
2023 CASE_MATHFN (GAMMA)
2024 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2025 CASE_MATHFN (HUGE_VAL)
2026 CASE_MATHFN (HYPOT)
2027 CASE_MATHFN (ILOGB)
2028 CASE_MATHFN (ICEIL)
2029 CASE_MATHFN (IFLOOR)
2030 CASE_MATHFN (INF)
2031 CASE_MATHFN (IRINT)
2032 CASE_MATHFN (IROUND)
2033 CASE_MATHFN (ISINF)
2034 CASE_MATHFN (J0)
2035 CASE_MATHFN (J1)
2036 CASE_MATHFN (JN)
2037 CASE_MATHFN (LCEIL)
2038 CASE_MATHFN (LDEXP)
2039 CASE_MATHFN (LFLOOR)
2040 CASE_MATHFN (LGAMMA)
2041 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2042 CASE_MATHFN (LLCEIL)
2043 CASE_MATHFN (LLFLOOR)
2044 CASE_MATHFN (LLRINT)
2045 CASE_MATHFN (LLROUND)
2046 CASE_MATHFN (LOG)
2047 CASE_MATHFN (LOG10)
2048 CASE_MATHFN (LOG1P)
2049 CASE_MATHFN (LOG2)
2050 CASE_MATHFN (LOGB)
2051 CASE_MATHFN (LRINT)
2052 CASE_MATHFN (LROUND)
2053 CASE_MATHFN (MODF)
2054 CASE_MATHFN (NAN)
2055 CASE_MATHFN (NANS)
2056 CASE_MATHFN_FLOATN (NEARBYINT)
2057 CASE_MATHFN (NEXTAFTER)
2058 CASE_MATHFN (NEXTTOWARD)
2059 CASE_MATHFN (POW)
2060 CASE_MATHFN (POWI)
2061 CASE_MATHFN (POW10)
2062 CASE_MATHFN (REMAINDER)
2063 CASE_MATHFN (REMQUO)
2064 CASE_MATHFN_FLOATN (RINT)
2065 CASE_MATHFN_FLOATN (ROUND)
2066 CASE_MATHFN_FLOATN (ROUNDEVEN)
2067 CASE_MATHFN (SCALB)
2068 CASE_MATHFN (SCALBLN)
2069 CASE_MATHFN (SCALBN)
2070 CASE_MATHFN (SIGNBIT)
2071 CASE_MATHFN (SIGNIFICAND)
2072 CASE_MATHFN (SIN)
2073 CASE_MATHFN (SINCOS)
2074 CASE_MATHFN (SINH)
2075 CASE_MATHFN_FLOATN (SQRT)
2076 CASE_MATHFN (TAN)
2077 CASE_MATHFN (TANH)
2078 CASE_MATHFN (TGAMMA)
2079 CASE_MATHFN_FLOATN (TRUNC)
2080 CASE_MATHFN (Y0)
2081 CASE_MATHFN (Y1)
2082 CASE_MATHFN (YN)
2084 default:
2085 return END_BUILTINS;
2088 mtype = TYPE_MAIN_VARIANT (type);
2089 if (mtype == double_type_node)
2090 return fcode;
2091 else if (mtype == float_type_node)
2092 return fcodef;
2093 else if (mtype == long_double_type_node)
2094 return fcodel;
2095 else if (mtype == float16_type_node)
2096 return fcodef16;
2097 else if (mtype == float32_type_node)
2098 return fcodef32;
2099 else if (mtype == float64_type_node)
2100 return fcodef64;
2101 else if (mtype == float128_type_node)
2102 return fcodef128;
2103 else if (mtype == float32x_type_node)
2104 return fcodef32x;
2105 else if (mtype == float64x_type_node)
2106 return fcodef64x;
2107 else if (mtype == float128x_type_node)
2108 return fcodef128x;
2109 else
2110 return END_BUILTINS;
2113 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
2114 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2115 otherwise use the explicit declaration. If we can't do the conversion,
2116 return null. */
2118 static tree
2119 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2121 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2122 if (fcode2 == END_BUILTINS)
2123 return NULL_TREE;
2125 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2126 return NULL_TREE;
2128 return builtin_decl_explicit (fcode2);
2131 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2133 tree
2134 mathfn_built_in (tree type, combined_fn fn)
2136 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2139 /* Like mathfn_built_in_1, but take a built_in_function and
2140 always use the implicit builtin declarations. */
2142 tree
2143 mathfn_built_in (tree type, enum built_in_function fn)
2145 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
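/* Illustrative use: a pass that narrows a double computation to float
   can ask for the matching builtin with

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the declaration of sqrtf, or NULL_TREE if the implicit
   declaration is not available for the target.  */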
2148 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2149 return its code, otherwise return IFN_LAST. Note that this function
2150 only tests whether the function is defined in internal-fn.def, not whether
2151 it is actually available on the target. */
2153 internal_fn
2154 associated_internal_fn (tree fndecl)
2156 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2157 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2161 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2162 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2163 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2164 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2165 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2166 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2167 #include "internal-fn.def"
2169 CASE_FLT_FN (BUILT_IN_POW10):
2170 return IFN_EXP10;
2172 CASE_FLT_FN (BUILT_IN_DREM):
2173 return IFN_REMAINDER;
2175 CASE_FLT_FN (BUILT_IN_SCALBN):
2176 CASE_FLT_FN (BUILT_IN_SCALBLN):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2178 return IFN_LDEXP;
2179 return IFN_LAST;
2181 default:
2182 return IFN_LAST;
2186 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2187 on the current target by a call to an internal function, return the
2188 code of that internal function, otherwise return IFN_LAST. The caller
2189 is responsible for ensuring that any side-effects of the built-in
2190 call are dealt with correctly. E.g. if CALL sets errno, the caller
2191 must decide that the errno result isn't needed or make it available
2192 in some other way. */
2194 internal_fn
2195 replacement_internal_fn (gcall *call)
2197 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2199 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2200 if (ifn != IFN_LAST)
2202 tree_pair types = direct_internal_fn_types (ifn, call);
2203 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2204 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2205 return ifn;
2208 return IFN_LAST;
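/* For example (illustrative), given a GIMPLE statement

     x = __builtin_sqrtf (y);

   replacement_internal_fn returns IFN_SQRT when the target has a
   sqrt pattern for SFmode in the block's optimization mode, letting
   a caller that knows the errno result is unneeded substitute the
   internal function for the library builtin.  */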
2211 /* Expand a call to the builtin ternary math functions (fma).
2212 Return NULL_RTX if a normal call should be emitted rather than expanding the
2213 function in-line. EXP is the expression that is a call to the builtin
2214 function; if convenient, the result should be placed in TARGET.
2215 SUBTARGET may be used as the target for computing one of EXP's
2216 operands. */
2218 static rtx
2219 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2221 optab builtin_optab;
2222 rtx op0, op1, op2, result;
2223 rtx_insn *insns;
2224 tree fndecl = get_callee_fndecl (exp);
2225 tree arg0, arg1, arg2;
2226 machine_mode mode;
2228 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2229 return NULL_RTX;
2231 arg0 = CALL_EXPR_ARG (exp, 0);
2232 arg1 = CALL_EXPR_ARG (exp, 1);
2233 arg2 = CALL_EXPR_ARG (exp, 2);
2235 switch (DECL_FUNCTION_CODE (fndecl))
2237 CASE_FLT_FN (BUILT_IN_FMA):
2238 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2239 builtin_optab = fma_optab; break;
2240 default:
2241 gcc_unreachable ();
2244 /* Make a suitable register to place result in. */
2245 mode = TYPE_MODE (TREE_TYPE (exp));
2247 /* Before working hard, check whether the instruction is available. */
2248 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2249 return NULL_RTX;
2251 result = gen_reg_rtx (mode);
2253 /* Always stabilize the argument list. */
2254 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2255 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2256 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2258 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2259 op1 = expand_normal (arg1);
2260 op2 = expand_normal (arg2);
2262 start_sequence ();
2264 /* Compute into RESULT.
2265 Set RESULT to wherever the result comes back. */
2266 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2267 result, 0);
2269 /* If we were unable to expand via the builtin, stop the sequence
2270 (without outputting the insns) and call the library function
2271 with the stabilized argument list. */
2272 if (result == 0)
2274 end_sequence ();
2275 return expand_call (exp, target, target == const0_rtx);
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2283 return result;
2286 /* Expand a call to the builtin sin and cos math functions.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function; if convenient, the result should be placed in TARGET.
2290 SUBTARGET may be used as the target for computing one of EXP's
2291 operands. */
2293 static rtx
2294 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2296 optab builtin_optab;
2297 rtx op0;
2298 rtx_insn *insns;
2299 tree fndecl = get_callee_fndecl (exp);
2300 machine_mode mode;
2301 tree arg;
2303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2304 return NULL_RTX;
2306 arg = CALL_EXPR_ARG (exp, 0);
2308 switch (DECL_FUNCTION_CODE (fndecl))
2310 CASE_FLT_FN (BUILT_IN_SIN):
2311 CASE_FLT_FN (BUILT_IN_COS):
2312 builtin_optab = sincos_optab; break;
2313 default:
2314 gcc_unreachable ();
2317 /* Make a suitable register to place result in. */
2318 mode = TYPE_MODE (TREE_TYPE (exp));
2320 /* Check if the sincos insn is available; otherwise fall back
2321 to the sin or cos insn. */
2322 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2323 switch (DECL_FUNCTION_CODE (fndecl))
2325 CASE_FLT_FN (BUILT_IN_SIN):
2326 builtin_optab = sin_optab; break;
2327 CASE_FLT_FN (BUILT_IN_COS):
2328 builtin_optab = cos_optab; break;
2329 default:
2330 gcc_unreachable ();
2333 /* Before working hard, check whether the instruction is available. */
2334 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2336 rtx result = gen_reg_rtx (mode);
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 side-effects more than once. */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2343 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2345 start_sequence ();
2347 /* Compute into RESULT.
2348 Set RESULT to wherever the result comes back. */
2349 if (builtin_optab == sincos_optab)
2351 int ok;
2353 switch (DECL_FUNCTION_CODE (fndecl))
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2357 break;
2358 CASE_FLT_FN (BUILT_IN_COS):
2359 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2360 break;
2361 default:
2362 gcc_unreachable ();
2364 gcc_assert (ok);
2366 else
2367 result = expand_unop (mode, builtin_optab, op0, result, 0);
2369 if (result != 0)
2371 /* Output the entire sequence. */
2372 insns = get_insns ();
2373 end_sequence ();
2374 emit_insn (insns);
2375 return result;
2378 /* If we were unable to expand via the builtin, stop the sequence
2379 (without outputting the insns) and call the library function
2380 with the stabilized argument list. */
2381 end_sequence ();
2384 return expand_call (exp, target, target == const0_rtx);
2387 /* Given an interclass math builtin decl FNDECL and its argument ARG
2388 return an RTL instruction code that implements the functionality.
2389 If that isn't possible or available return CODE_FOR_nothing. */
2391 static enum insn_code
2392 interclass_mathfn_icode (tree arg, tree fndecl)
2394 bool errno_set = false;
2395 optab builtin_optab = unknown_optab;
2396 machine_mode mode;
2398 switch (DECL_FUNCTION_CODE (fndecl))
2400 CASE_FLT_FN (BUILT_IN_ILOGB):
2401 errno_set = true; builtin_optab = ilogb_optab; break;
2402 CASE_FLT_FN (BUILT_IN_ISINF):
2403 builtin_optab = isinf_optab; break;
2404 case BUILT_IN_ISNORMAL:
2405 case BUILT_IN_ISFINITE:
2406 CASE_FLT_FN (BUILT_IN_FINITE):
2407 case BUILT_IN_FINITED32:
2408 case BUILT_IN_FINITED64:
2409 case BUILT_IN_FINITED128:
2410 case BUILT_IN_ISINFD32:
2411 case BUILT_IN_ISINFD64:
2412 case BUILT_IN_ISINFD128:
2413 /* These builtins have no optabs (yet). */
2414 break;
2415 default:
2416 gcc_unreachable ();
2419 /* There's no easy way to detect the case we need to set EDOM. */
2420 if (flag_errno_math && errno_set)
2421 return CODE_FOR_nothing;
2423 /* Optab mode depends on the mode of the input argument. */
2424 mode = TYPE_MODE (TREE_TYPE (arg));
2426 if (builtin_optab)
2427 return optab_handler (builtin_optab, mode);
2428 return CODE_FOR_nothing;
2431 /* Expand a call to one of the builtin math functions that operate on
2432 a floating-point argument and output an integer result (ilogb, isinf,
2433 isnan, etc).
2434 Return 0 if a normal call should be emitted rather than expanding the
2435 function in-line. EXP is the expression that is a call to the builtin
2436 function; if convenient, the result should be placed in TARGET. */
2438 static rtx
2439 expand_builtin_interclass_mathfn (tree exp, rtx target)
2441 enum insn_code icode = CODE_FOR_nothing;
2442 rtx op0;
2443 tree fndecl = get_callee_fndecl (exp);
2444 machine_mode mode;
2445 tree arg;
2447 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2448 return NULL_RTX;
2450 arg = CALL_EXPR_ARG (exp, 0);
2451 icode = interclass_mathfn_icode (arg, fndecl);
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2454 if (icode != CODE_FOR_nothing)
2456 class expand_operand ops[1];
2457 rtx_insn *last = get_last_insn ();
2458 tree orig_arg = arg;
2460 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2461 need to expand the argument again. This way, we will not perform
2462 side-effects more than once. */
2463 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2465 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2467 if (mode != GET_MODE (op0))
2468 op0 = convert_to_mode (mode, op0, 0);
2470 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2471 if (maybe_legitimize_operands (icode, 0, 1, ops)
2472 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2473 return ops[0].value;
2475 delete_insns_since (last);
2476 CALL_EXPR_ARG (exp, 0) = orig_arg;
2479 return NULL_RTX;
2482 /* Expand a call to the builtin sincos math function.
2483 Return NULL_RTX if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function. */
2487 static rtx
2488 expand_builtin_sincos (tree exp)
2490 rtx op0, op1, op2, target1, target2;
2491 machine_mode mode;
2492 tree arg, sinp, cosp;
2493 int result;
2494 location_t loc = EXPR_LOCATION (exp);
2495 tree alias_type, alias_off;
2497 if (!validate_arglist (exp, REAL_TYPE,
2498 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2499 return NULL_RTX;
2501 arg = CALL_EXPR_ARG (exp, 0);
2502 sinp = CALL_EXPR_ARG (exp, 1);
2503 cosp = CALL_EXPR_ARG (exp, 2);
2505 /* Make a suitable register to place result in. */
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2508 /* Check if sincos insn is available, otherwise emit the call. */
2509 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2510 return NULL_RTX;
2512 target1 = gen_reg_rtx (mode);
2513 target2 = gen_reg_rtx (mode);
2515 op0 = expand_normal (arg);
2516 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2517 alias_off = build_int_cst (alias_type, 0);
2518 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2519 sinp, alias_off));
2520 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2521 cosp, alias_off));
2523 /* Compute into target1 and target2.
2524 Set TARGET to wherever the result comes back. */
2525 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2526 gcc_assert (result);
2528 /* Move target1 and target2 to the memory locations indicated
2529 by op1 and op2. */
2530 emit_move_insn (op1, target1);
2531 emit_move_insn (op2, target2);
2533 return const0_rtx;
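/* Illustrative effect: on a target providing a sincos<mode>3 pattern,

     sincos (x, &s, &c);

   expands to the target's sincos insn computing both values at once
   into two registers, which are then stored through the given
   pointers, instead of a library call.  */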
2536 /* Expand a call to the internal cexpi builtin to the sincos math function.
2537 EXP is the expression that is a call to the builtin function; if convenient,
2538 the result should be placed in TARGET. */
2540 static rtx
2541 expand_builtin_cexpi (tree exp, rtx target)
2543 tree fndecl = get_callee_fndecl (exp);
2544 tree arg, type;
2545 machine_mode mode;
2546 rtx op0, op1, op2;
2547 location_t loc = EXPR_LOCATION (exp);
2549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2550 return NULL_RTX;
2552 arg = CALL_EXPR_ARG (exp, 0);
2553 type = TREE_TYPE (arg);
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2556 /* Try expanding via a sincos optab, fall back to emitting a libcall
2557 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2558 is only generated from sincos or cexp, or when either of them exists. */
2559 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2561 op1 = gen_reg_rtx (mode);
2562 op2 = gen_reg_rtx (mode);
2564 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2566 /* Compute into op1 and op2. */
2567 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2569 else if (targetm.libc_has_function (function_sincos))
2571 tree call, fn = NULL_TREE;
2572 tree top1, top2;
2573 rtx op1a, op2a;
2575 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2576 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2577 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2578 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2579 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2581 else
2582 gcc_unreachable ();
2584 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2586 op1a = copy_addr_to_reg (XEXP (op1, 0));
2587 op2a = copy_addr_to_reg (XEXP (op2, 0));
2588 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2589 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2591 /* Make sure not to fold the sincos call again. */
2592 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2593 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2594 call, 3, arg, top1, top2));
2596 else
2598 tree call, fn = NULL_TREE, narg;
2599 tree ctype = build_complex_type (type);
2601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2602 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2604 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2607 else
2608 gcc_unreachable ();
2610 /* If we don't have a decl for cexp create one. This is the
2611 friendliest fallback if the user calls __builtin_cexpi
2612 without full target C99 function support. */
2613 if (fn == NULL_TREE)
2615 tree fntype;
2616 const char *name = NULL;
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 name = "cexpf";
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 name = "cexp";
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 name = "cexpl";
2625 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2626 fn = build_fn_decl (name, fntype);
2629 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2630 build_real (type, dconst0), arg);
2632 /* Make sure not to fold the cexp call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 return expand_expr (build_call_nary (ctype, call, 1, narg),
2635 target, VOIDmode, EXPAND_NORMAL);
2638 /* Now build the proper return type. */
2639 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2640 make_tree (TREE_TYPE (arg), op2),
2641 make_tree (TREE_TYPE (arg), op1)),
2642 target, VOIDmode, EXPAND_NORMAL);
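/* This realizes the identity cexpi (x) == cos (x) + i * sin (x).
   Illustratively,

     _Complex double z = __builtin_cexpi (x);

   becomes the equivalent of sincos (x, &s, &c); z = c + s * 1.0i;
   with a call to cexp used only as a last resort when neither a
   sincos optab nor a sincos libc function is available.  */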
2645 /* Conveniently construct a function call expression. FNDECL names the
2646 function to be called, N is the number of arguments, and the "..."
2647 parameters are the argument expressions. Unlike build_call_expr
2648 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2650 static tree
2651 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2653 va_list ap;
2654 tree fntype = TREE_TYPE (fndecl);
2655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2657 va_start (ap, n);
2658 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2659 va_end (ap);
2660 SET_EXPR_LOCATION (fn, loc);
2661 return fn;
2664 /* Expand a call to one of the builtin rounding functions gcc defines
2665 as an extension (lfloor and lceil). As these are gcc extensions we
2666 do not need to worry about setting errno to EDOM.
2667 If expanding via optab fails, lower expression to (int)(floor(x)).
2668 EXP is the expression that is a call to the builtin function;
2669 if convenient, the result should be placed in TARGET. */
2671 static rtx
2672 expand_builtin_int_roundingfn (tree exp, rtx target)
2674 convert_optab builtin_optab;
2675 rtx op0, tmp;
2676 rtx_insn *insns;
2677 tree fndecl = get_callee_fndecl (exp);
2678 enum built_in_function fallback_fn;
2679 tree fallback_fndecl;
2680 machine_mode mode;
2681 tree arg;
2683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2684 return NULL_RTX;
2686 arg = CALL_EXPR_ARG (exp, 0);
2688 switch (DECL_FUNCTION_CODE (fndecl))
2690 CASE_FLT_FN (BUILT_IN_ICEIL):
2691 CASE_FLT_FN (BUILT_IN_LCEIL):
2692 CASE_FLT_FN (BUILT_IN_LLCEIL):
2693 builtin_optab = lceil_optab;
2694 fallback_fn = BUILT_IN_CEIL;
2695 break;
2697 CASE_FLT_FN (BUILT_IN_IFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LFLOOR):
2699 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2700 builtin_optab = lfloor_optab;
2701 fallback_fn = BUILT_IN_FLOOR;
2702 break;
2704 default:
2705 gcc_unreachable ();
2708 /* Make a suitable register to place result in. */
2709 mode = TYPE_MODE (TREE_TYPE (exp));
2711 target = gen_reg_rtx (mode);
2713 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2714 need to expand the argument again. This way, we will not perform
2715 side-effects more than once. */
2716 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2718 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2720 start_sequence ();
2722 /* Compute into TARGET. */
2723 if (expand_sfix_optab (target, op0, builtin_optab))
2725 /* Output the entire sequence. */
2726 insns = get_insns ();
2727 end_sequence ();
2728 emit_insn (insns);
2729 return target;
2732 /* If we were unable to expand via the builtin, stop the sequence
2733 (without outputting the insns). */
2734 end_sequence ();
2736 /* Fall back to floating point rounding optab. */
2737 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2739 /* For non-C99 targets we may end up without a fallback fndecl here
2740 if the user called __builtin_lfloor directly. In this case emit
2741 a call to the floor/ceil variants nevertheless. This should result
2742 in the best user experience for targets lacking full C99 support. */
2743 if (fallback_fndecl == NULL_TREE)
2745 tree fntype;
2746 const char *name = NULL;
2748 switch (DECL_FUNCTION_CODE (fndecl))
2750 case BUILT_IN_ICEIL:
2751 case BUILT_IN_LCEIL:
2752 case BUILT_IN_LLCEIL:
2753 name = "ceil";
2754 break;
2755 case BUILT_IN_ICEILF:
2756 case BUILT_IN_LCEILF:
2757 case BUILT_IN_LLCEILF:
2758 name = "ceilf";
2759 break;
2760 case BUILT_IN_ICEILL:
2761 case BUILT_IN_LCEILL:
2762 case BUILT_IN_LLCEILL:
2763 name = "ceill";
2764 break;
2765 case BUILT_IN_IFLOOR:
2766 case BUILT_IN_LFLOOR:
2767 case BUILT_IN_LLFLOOR:
2768 name = "floor";
2769 break;
2770 case BUILT_IN_IFLOORF:
2771 case BUILT_IN_LFLOORF:
2772 case BUILT_IN_LLFLOORF:
2773 name = "floorf";
2774 break;
2775 case BUILT_IN_IFLOORL:
2776 case BUILT_IN_LFLOORL:
2777 case BUILT_IN_LLFLOORL:
2778 name = "floorl";
2779 break;
2780 default:
2781 gcc_unreachable ();
2784 fntype = build_function_type_list (TREE_TYPE (arg),
2785 TREE_TYPE (arg), NULL_TREE);
2786 fallback_fndecl = build_fn_decl (name, fntype);
2789 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2791 tmp = expand_normal (exp);
2792 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2794 /* Truncate the result of the floating-point optab to integer
2795 via expand_fix (). */
2796 target = gen_reg_rtx (mode);
2797 expand_fix (target, tmp, 0);
2799 return target;
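/* For example (illustrative), on a target without an lfloor optab
   pattern,

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of

     long l = (long) floor (x);

   that is, a call to floor followed by an ordinary float-to-integer
   conversion through expand_fix.  */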
2802 /* Expand a call to one of the builtin math functions doing integer
2803 conversion (lrint).
2804 Return 0 if a normal call should be emitted rather than expanding the
2805 function in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2808 static rtx
2809 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2811 convert_optab builtin_optab;
2812 rtx op0;
2813 rtx_insn *insns;
2814 tree fndecl = get_callee_fndecl (exp);
2815 tree arg;
2816 machine_mode mode;
2817 enum built_in_function fallback_fn = BUILT_IN_NONE;
2819 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2820 return NULL_RTX;
2822 arg = CALL_EXPR_ARG (exp, 0);
2824 switch (DECL_FUNCTION_CODE (fndecl))
2826 CASE_FLT_FN (BUILT_IN_IRINT):
2827 fallback_fn = BUILT_IN_LRINT;
2828 gcc_fallthrough ();
2829 CASE_FLT_FN (BUILT_IN_LRINT):
2830 CASE_FLT_FN (BUILT_IN_LLRINT):
2831 builtin_optab = lrint_optab;
2832 break;
2834 CASE_FLT_FN (BUILT_IN_IROUND):
2835 fallback_fn = BUILT_IN_LROUND;
2836 gcc_fallthrough ();
2837 CASE_FLT_FN (BUILT_IN_LROUND):
2838 CASE_FLT_FN (BUILT_IN_LLROUND):
2839 builtin_optab = lround_optab;
2840 break;
2842 default:
2843 gcc_unreachable ();
2846 /* There's no easy way to detect the case we need to set EDOM. */
2847 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2848 return NULL_RTX;
2850 /* Make a suitable register to place result in. */
2851 mode = TYPE_MODE (TREE_TYPE (exp));
2853 /* There's no easy way to detect the case we need to set EDOM. */
2854 if (!flag_errno_math)
2856 rtx result = gen_reg_rtx (mode);
2858 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2859 need to expand the argument again. This way, we will not perform
2860 side-effects more than once. */
2861 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2863 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2865 start_sequence ();
2867 if (expand_sfix_optab (result, op0, builtin_optab))
2869 /* Output the entire sequence. */
2870 insns = get_insns ();
2871 end_sequence ();
2872 emit_insn (insns);
2873 return result;
2876 /* If we were unable to expand via the builtin, stop the sequence
2877 (without outputting the insns) and call the library function
2878 with the stabilized argument list. */
2879 end_sequence ();
2882 if (fallback_fn != BUILT_IN_NONE)
2884 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2885 targets, (int) round (x) should never be transformed into
2886 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2887 a call to lround in the hope that the target provides at least some
2888 C99 functions. This should result in the best user experience for
2889 targets lacking full C99 support. */
2890 tree fallback_fndecl = mathfn_built_in_1
2891 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2893 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2894 fallback_fndecl, 1, arg);
2896 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2897 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2898 return convert_to_mode (mode, target, 0);
2901 return expand_call (exp, target, target == const0_rtx);
2904 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2905 a normal call should be emitted rather than expanding the function
2906 in-line. EXP is the expression that is a call to the builtin
2907 function; if convenient, the result should be placed in TARGET. */
2909 static rtx
2910 expand_builtin_powi (tree exp, rtx target)
2912 tree arg0, arg1;
2913 rtx op0, op1;
2914 machine_mode mode;
2915 machine_mode mode2;
2917 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2918 return NULL_RTX;
2920 arg0 = CALL_EXPR_ARG (exp, 0);
2921 arg1 = CALL_EXPR_ARG (exp, 1);
2922 mode = TYPE_MODE (TREE_TYPE (exp));
2924 /* Emit a libcall to libgcc. */
2926 /* Mode of the 2nd argument must match that of an int. */
2927 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2929 if (target == NULL_RTX)
2930 target = gen_reg_rtx (mode);
2932 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2933 if (GET_MODE (op0) != mode)
2934 op0 = convert_to_mode (mode, op0, 0);
2935 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2936 if (GET_MODE (op1) != mode2)
2937 op1 = convert_to_mode (mode2, op1, 0);
2939 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2940 target, LCT_CONST, mode,
2941 op0, mode, op1, mode2);
2943 return target;
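/* Illustratively,

     double d = __builtin_powi (x, n);

   with a non-constant N emits a call to the libgcc helper __powidf2
   (__powisf2, __powitf2, etc. for other modes); constant exponents
   are normally expanded into multiplications by earlier passes and
   never reach this point.  */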
2946 /* Expand expression EXP which is a call to the strlen builtin. Return
2947 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2948 try to get the result in TARGET, if convenient. */
2950 static rtx
2951 expand_builtin_strlen (tree exp, rtx target,
2952 machine_mode target_mode)
2954 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2955 return NULL_RTX;
2957 class expand_operand ops[4];
2958 rtx pat;
2959 tree len;
2960 tree src = CALL_EXPR_ARG (exp, 0);
2961 rtx src_reg;
2962 rtx_insn *before_strlen;
2963 machine_mode insn_mode;
2964 enum insn_code icode = CODE_FOR_nothing;
2965 unsigned int align;
2967 /* If the length can be computed at compile-time, return it. */
2968 len = c_strlen (src, 0);
2969 if (len)
2970 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2972 /* If the length can be computed at compile-time and is a constant
2973 integer, but there are side-effects in src, evaluate
2974 src for side-effects, then return len.
2975 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2976 can be optimized into: i++; x = 3; */
2977 len = c_strlen (src, 1);
2978 if (len && TREE_CODE (len) == INTEGER_CST)
2980 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2984 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2986 /* If SRC is not a pointer type, don't do this operation inline. */
2987 if (align == 0)
2988 return NULL_RTX;
2990 /* Bail out if we can't compute strlen in the right mode. */
2991 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2993 icode = optab_handler (strlen_optab, insn_mode);
2994 if (icode != CODE_FOR_nothing)
2995 break;
2997 if (insn_mode == VOIDmode)
2998 return NULL_RTX;
3000 /* Make a place to hold the source address. We will not expand
3001 the actual source until we are sure that the expansion will
3002 not fail -- there are trees that cannot be expanded twice. */
3003 src_reg = gen_reg_rtx (Pmode);
3005 /* Mark the beginning of the strlen sequence so we can emit the
3006 source operand later. */
3007 before_strlen = get_last_insn ();
3009 create_output_operand (&ops[0], target, insn_mode);
3010 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3011 create_integer_operand (&ops[2], 0);
3012 create_integer_operand (&ops[3], align);
3013 if (!maybe_expand_insn (icode, 4, ops))
3014 return NULL_RTX;
3016 /* Check to see if the argument was declared attribute nonstring
3017 and if so, issue a warning since at this point it's not known
3018 to be nul-terminated. */
3019 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3021 /* Now that we are assured of success, expand the source. */
3022 start_sequence ();
3023 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3024 if (pat != src_reg)
3026 #ifdef POINTERS_EXTEND_UNSIGNED
3027 if (GET_MODE (pat) != Pmode)
3028 pat = convert_to_mode (Pmode, pat,
3029 POINTERS_EXTEND_UNSIGNED);
3030 #endif
3031 emit_move_insn (src_reg, pat);
3033 pat = get_insns ();
3034 end_sequence ();
3036 if (before_strlen)
3037 emit_insn_after (pat, before_strlen);
3038 else
3039 emit_insn_before (pat, get_insns ());
3041 /* Return the value in the proper mode for this function. */
3042 if (GET_MODE (ops[0].value) == target_mode)
3043 target = ops[0].value;
3044 else if (target != 0)
3045 convert_move (target, ops[0].value, 0);
3046 else
3047 target = convert_to_mode (target_mode, ops[0].value, 0);
3049 return target;
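/* The two c_strlen cases above, illustrated:

     size_t a = strlen ("hello");                  // folded to 5
     size_t b = strlen (i++ ? "xfoo" + 1 : "bar"); // i++ kept, b = 3

   Only when neither folding applies does the expansion try the
   target's strlen<mode> pattern on the source bytes.  */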
3052 /* Expand call EXP to the strnlen built-in, returning the result
3053 in TARGET if convenient, or NULL_RTX on failure. */
3055 static rtx
3056 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3058 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3061 tree src = CALL_EXPR_ARG (exp, 0);
3062 tree bound = CALL_EXPR_ARG (exp, 1);
3064 if (!bound)
3065 return NULL_RTX;
3067 location_t loc = UNKNOWN_LOCATION;
3068 if (EXPR_HAS_LOCATION (exp))
3069 loc = EXPR_LOCATION (exp);
3071 tree maxobjsize = max_object_size ();
3072 tree func = get_callee_fndecl (exp);
3074 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3075 so these conversions aren't necessary. */
3076 c_strlen_data lendata = { };
3077 tree len = c_strlen (src, 0, &lendata, 1);
3078 if (len)
3079 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3081 if (TREE_CODE (bound) == INTEGER_CST)
3083 if (!TREE_NO_WARNING (exp)
3084 && tree_int_cst_lt (maxobjsize, bound)
3085 && warning_at (loc, OPT_Wstringop_overflow_,
3086 "%K%qD specified bound %E "
3087 "exceeds maximum object size %E",
3088 exp, func, bound, maxobjsize))
3089 TREE_NO_WARNING (exp) = true;
3091 bool exact = true;
3092 if (!len || TREE_CODE (len) != INTEGER_CST)
3094 /* Clear EXACT if LEN may be less than SRC suggests,
3095 such as in
3096 strnlen (&a[i], sizeof a)
3097 where the value of i is unknown. Unless i's value is
3098 zero, the call is unsafe because the bound is greater. */
3099 lendata.decl = unterminated_array (src, &len, &exact);
3100 if (!lendata.decl)
3101 return NULL_RTX;
3104 if (lendata.decl
3105 && !TREE_NO_WARNING (exp)
3106 && ((tree_int_cst_lt (len, bound))
3107 || !exact))
3109 location_t warnloc
3110 = expansion_point_location_if_in_system_header (loc);
3112 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3113 exact
3114 ? G_("%K%qD specified bound %E exceeds the size %E "
3115 "of unterminated array")
3116 : G_("%K%qD specified bound %E may exceed the size "
3117 "of at most %E of unterminated array"),
3118 exp, func, bound, len))
3120 inform (DECL_SOURCE_LOCATION (lendata.decl),
3121 "referenced argument declared here");
3122 TREE_NO_WARNING (exp) = true;
3123 return NULL_RTX;
3127 if (!len)
3128 return NULL_RTX;
3130 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3131 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3134 if (TREE_CODE (bound) != SSA_NAME)
3135 return NULL_RTX;
3137 wide_int min, max;
3138 enum value_range_kind rng = get_range_info (bound, &min, &max);
3139 if (rng != VR_RANGE)
3140 return NULL_RTX;
3142 if (!TREE_NO_WARNING (exp)
3143 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3144 && warning_at (loc, OPT_Wstringop_overflow_,
3145 "%K%qD specified bound [%wu, %wu] "
3146 "exceeds maximum object size %E",
3147 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3148 TREE_NO_WARNING (exp) = true;
3150 bool exact = true;
3151 if (!len || TREE_CODE (len) != INTEGER_CST)
3153 lendata.decl = unterminated_array (src, &len, &exact);
3154 if (!lendata.decl)
3155 return NULL_RTX;
3158 if (lendata.decl
3159 && !TREE_NO_WARNING (exp)
3160 && (wi::ltu_p (wi::to_wide (len), min)
3161 || !exact))
3163 location_t warnloc
3164 = expansion_point_location_if_in_system_header (loc);
3166 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3167 exact
3168 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3169 "the size %E of unterminated array")
3170 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3171 "the size of at most %E of unterminated array"),
3172 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3174 inform (DECL_SOURCE_LOCATION (lendata.decl),
3175 "referenced argument declared here");
3176 TREE_NO_WARNING (exp) = true;
3180 if (lendata.decl)
3181 return NULL_RTX;
3183 if (wi::gtu_p (min, wi::to_wide (len)))
3184 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3186 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3187 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
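/* Illustrative diagnostics from the checks above:

     char u[3] = "xyz";   // no room for the terminating nul

     strnlen (u, 8);      // -Wstringop-overflow: specified bound 8
                          // exceeds the size 3 of unterminated array

   In the warned cases NULL_RTX is returned so the call remains
   visible as an ordinary library call.  */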
3190 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3191 bytes from constant string DATA + OFFSET and return it as target
3192 constant. */
3194 static rtx
3195 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3196 scalar_int_mode mode)
3198 const char *str = (const char *) data;
3200 gcc_assert (offset >= 0
3201 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3202 <= strlen (str) + 1));
3204 return c_readstr (str + offset, mode);
3207 /* LEN specifies the length of the block for a memcpy/memset operation.
3208 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3209 In some cases we can make a very likely guess at the maximum size,
3210 which we then store in PROBABLE_MAX_SIZE. */
3212 static void
3213 determine_block_size (tree len, rtx len_rtx,
3214 unsigned HOST_WIDE_INT *min_size,
3215 unsigned HOST_WIDE_INT *max_size,
3216 unsigned HOST_WIDE_INT *probable_max_size)
3218 if (CONST_INT_P (len_rtx))
3220 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3221 return;
3223 else
3225 wide_int min, max;
3226 enum value_range_kind range_type = VR_UNDEFINED;
3228 /* Determine bounds from the type. */
3229 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3230 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3231 else
3232 *min_size = 0;
3233 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3234 *probable_max_size = *max_size
3235 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3236 else
3237 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3239 if (TREE_CODE (len) == SSA_NAME)
3240 range_type = get_range_info (len, &min, &max);
3241 if (range_type == VR_RANGE)
3243 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3244 *min_size = min.to_uhwi ();
3245 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3246 *probable_max_size = *max_size = max.to_uhwi ();
3248 else if (range_type == VR_ANTI_RANGE)
3250 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3251 if (min == 0)
3253 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3254 *min_size = max.to_uhwi () + 1;
3256 /* Code like
3258 int n;
3259 if (n < 100)
3260 memcpy (a, b, n)
3262 produces an anti-range allowing negative values of N. We can
3263 still use the information and guess that N is not negative. */
3265 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3266 *probable_max_size = min.to_uhwi () - 1;
3269 gcc_checking_assert (*max_size <=
3270 (unsigned HOST_WIDE_INT)
3271 GET_MODE_MASK (GET_MODE (len_rtx)));
3274 /* Try to verify that the sizes and lengths of the arguments to a string
3275 manipulation function given by EXP are within valid bounds and that
3276 the operation does not lead to buffer overflow or read past the end.
3277 Arguments other than EXP may be null. When non-null, the arguments
3278 have the following meaning:
3279 DST is the destination of a copy call or NULL otherwise.
3280 SRC is the source of a copy call or NULL otherwise.
3281 DSTWRITE is the number of bytes written into the destination obtained
3282 from the user-supplied size argument to the function (such as in
3283 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3284 MAXREAD is the user-supplied bound on the length of the source sequence
3285 (such as in strncat (d, s, N)). It specifies the upper limit on the number
3286 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3287 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3288 expression EXP is a string function call (as opposed to a memory call
3289 like memcpy). As an exception, SRCSTR can also be an integer denoting
3290 the precomputed size of the source string or object (for functions like
3291 memcpy).
3292 DSTSIZE is the size of the destination object specified by the last
3293 argument to the _chk builtins, typically resulting from the expansion
3294 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3295 DSTSIZE).
3297 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3298 SIZE_MAX.
3300 If the call is successfully verified as safe return true, otherwise
3301 return false. */
3303 static bool
3304 check_access (tree exp, tree, tree, tree dstwrite,
3305 tree maxread, tree srcstr, tree dstsize)
3307 int opt = OPT_Wstringop_overflow_;
3309 /* The size of the largest object is half the address space, or
3310 PTRDIFF_MAX. (This is way too permissive.) */
3311 tree maxobjsize = max_object_size ();
3313 /* Either the length of the source string for string functions or
3314 the size of the source object for raw memory functions. */
3315 tree slen = NULL_TREE;
3317 tree range[2] = { NULL_TREE, NULL_TREE };
3319 /* Set to true when the exact number of bytes written by a string
3320 function like strcpy is not known and the only thing that is
3321 known is that it must be at least one (for the terminating nul). */
3322 bool at_least_one = false;
3323 if (srcstr)
3325 /* SRCSTR is normally a pointer to string but as a special case
3326 it can be an integer denoting the length of a string. */
3327 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3329 /* Try to determine the range of lengths the source string
3330 refers to. If it can be determined and is less than
3331 the upper bound given by MAXREAD add one to it for
3332 the terminating nul. Otherwise, set it to one for
3333 the same reason, or to MAXREAD as appropriate. */
3334 c_strlen_data lendata = { };
3335 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3336 range[0] = lendata.minlen;
3337 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3338 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3340 if (maxread && tree_int_cst_le (maxread, range[0]))
3341 range[0] = range[1] = maxread;
3342 else
3343 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3344 range[0], size_one_node);
3346 if (maxread && tree_int_cst_le (maxread, range[1]))
3347 range[1] = maxread;
3348 else if (!integer_all_onesp (range[1]))
3349 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3350 range[1], size_one_node);
3352 slen = range[0];
3354 else
3356 at_least_one = true;
3357 slen = size_one_node;
3360 else
3361 slen = srcstr;
3364 if (!dstwrite && !maxread)
3366 /* When the only available piece of data is the object size
3367 there is nothing to do. */
3368 if (!slen)
3369 return true;
3371 /* Otherwise, when the length of the source sequence is known
3372 (as with strlen), set DSTWRITE to it. */
3373 if (!range[0])
3374 dstwrite = slen;
3377 if (!dstsize)
3378 dstsize = maxobjsize;
3380 if (dstwrite)
3381 get_size_range (dstwrite, range);
3383 tree func = get_callee_fndecl (exp);
3385 /* First check the number of bytes to be written against the maximum
3386 object size. */
3387 if (range[0]
3388 && TREE_CODE (range[0]) == INTEGER_CST
3389 && tree_int_cst_lt (maxobjsize, range[0]))
3391 if (TREE_NO_WARNING (exp))
3392 return false;
3394 location_t loc = tree_nonartificial_location (exp);
3395 loc = expansion_point_location_if_in_system_header (loc);
3397 bool warned;
3398 if (range[0] == range[1])
3399 warned = warning_at (loc, opt,
3400 "%K%qD specified size %E "
3401 "exceeds maximum object size %E",
3402 exp, func, range[0], maxobjsize);
3403 else
3404 warned = warning_at (loc, opt,
3405 "%K%qD specified size between %E and %E "
3406 "exceeds maximum object size %E",
3407 exp, func,
3408 range[0], range[1], maxobjsize);
3409 if (warned)
3410 TREE_NO_WARNING (exp) = true;
3412 return false;
3415 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3416 constant, and in range of unsigned HOST_WIDE_INT. */
3417 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3419 /* Next check the number of bytes to be written against the destination
3420 object size. */
3421 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3423 if (range[0]
3424 && TREE_CODE (range[0]) == INTEGER_CST
3425 && ((tree_fits_uhwi_p (dstsize)
3426 && tree_int_cst_lt (dstsize, range[0]))
3427 || (dstwrite
3428 && tree_fits_uhwi_p (dstwrite)
3429 && tree_int_cst_lt (dstwrite, range[0]))))
3431 if (TREE_NO_WARNING (exp))
3432 return false;
3434 location_t loc = tree_nonartificial_location (exp);
3435 loc = expansion_point_location_if_in_system_header (loc);
3437 if (dstwrite == slen && at_least_one)
3439 /* This is a call to strcpy with a destination of 0 size
3440 and a source of unknown length. The call will write
3441 at least one byte past the end of the destination. */
3442 warning_at (loc, opt,
3443 "%K%qD writing %E or more bytes into a region "
3444 "of size %E overflows the destination",
3445 exp, func, range[0], dstsize);
3447 else if (tree_int_cst_equal (range[0], range[1]))
3448 warning_n (loc, opt, tree_to_uhwi (range[0]),
3449 "%K%qD writing %E byte into a region "
3450 "of size %E overflows the destination",
3451 "%K%qD writing %E bytes into a region "
3452 "of size %E overflows the destination",
3453 exp, func, range[0], dstsize);
3454 else if (tree_int_cst_sign_bit (range[1]))
3456 /* Avoid printing the upper bound if it's invalid. */
3457 warning_at (loc, opt,
3458 "%K%qD writing %E or more bytes into a region "
3459 "of size %E overflows the destination",
3460 exp, func, range[0], dstsize);
3462 else
3463 warning_at (loc, opt,
3464 "%K%qD writing between %E and %E bytes into "
3465 "a region of size %E overflows the destination",
3466 exp, func, range[0], range[1],
3467 dstsize);
3469 /* Return error when an overflow has been detected. */
3470 return false;
3474 /* Check the maximum length of the source sequence against the size
3475 of the destination object if known, or against the maximum size
3476 of an object. */
3477 if (maxread)
3479 get_size_range (maxread, range);
3480 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3482 location_t loc = tree_nonartificial_location (exp);
3483 loc = expansion_point_location_if_in_system_header (loc);
3485 if (tree_int_cst_lt (maxobjsize, range[0]))
3487 if (TREE_NO_WARNING (exp))
3488 return false;
3490 /* Warn about crazy big sizes first since that's more
3491 likely to be meaningful than saying that the bound
3492 is greater than the object size if both are big. */
3493 if (range[0] == range[1])
3494 warning_at (loc, opt,
3495 "%K%qD specified bound %E "
3496 "exceeds maximum object size %E",
3497 exp, func,
3498 range[0], maxobjsize);
3499 else
3500 warning_at (loc, opt,
3501 "%K%qD specified bound between %E and %E "
3502 "exceeds maximum object size %E",
3503 exp, func,
3504 range[0], range[1], maxobjsize);
3506 return false;
3509 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3511 if (TREE_NO_WARNING (exp))
3512 return false;
3514 if (tree_int_cst_equal (range[0], range[1]))
3515 warning_at (loc, opt,
3516 "%K%qD specified bound %E "
3517 "exceeds destination size %E",
3518 exp, func,
3519 range[0], dstsize);
3520 else
3521 warning_at (loc, opt,
3522 "%K%qD specified bound between %E and %E "
3523 "exceeds destination size %E",
3524 exp, func,
3525 range[0], range[1], dstsize);
3526 return false;
3531 /* Check for reading past the end of SRC. */
3532 if (slen
3533 && slen == srcstr
3534 && dstwrite && range[0]
3535 && tree_int_cst_lt (slen, range[0]))
3537 if (TREE_NO_WARNING (exp))
3538 return false;
3540 location_t loc = tree_nonartificial_location (exp);
3542 if (tree_int_cst_equal (range[0], range[1]))
3543 warning_n (loc, opt, tree_to_uhwi (range[0]),
3544 "%K%qD reading %E byte from a region of size %E",
3545 "%K%qD reading %E bytes from a region of size %E",
3546 exp, func, range[0], slen);
3547 else if (tree_int_cst_sign_bit (range[1]))
3549 /* Avoid printing the upper bound if it's invalid. */
3550 warning_at (loc, opt,
3551 "%K%qD reading %E or more bytes from a region "
3552 "of size %E",
3553 exp, func, range[0], slen);
3555 else
3556 warning_at (loc, opt,
3557 "%K%qD reading between %E and %E bytes from a region "
3558 "of size %E",
3559 exp, func, range[0], range[1], slen);
3560 return false;
3563 return true;
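/* For illustration, given, say:

     char d[3];
     strcpy (d, "abcd");

   the source length range is [4, 4], RANGE[0] is bumped to 5 for
   the terminating nul, and since 5 exceeds the destination size 3
   the "writing N bytes into a region of size M" warning above is
   expected to fire.  */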
3566 /* Helper to compute the size of the object referenced by the DEST
3567 expression, which must have pointer type, using Object Size type
3568 OSTYPE (only the least significant 2 bits are used). Return
3569 an estimate of the size of the object if successful or NULL when
3570 the size cannot be determined. When the referenced object involves
3571 a non-constant offset in some range, the returned value represents
3572 the largest size given the smallest non-negative offset in the
3573 range. If nonnull, set *PDECL to the decl of the referenced
3574 subobject if it can be determined, or to null otherwise.
3575 The function is intended for diagnostics and should not be used
3576 to influence code generation or optimization. */
3578 tree
3579 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */)
3581 tree dummy = NULL_TREE;
3582 if (!pdecl)
3583 pdecl = &dummy;
3585 unsigned HOST_WIDE_INT size;
3587 /* Only the two least significant bits are meaningful. */
3588 ostype &= 3;
3590 if (compute_builtin_object_size (dest, ostype, &size, pdecl))
3591 return build_int_cst (sizetype, size);
3593 if (TREE_CODE (dest) == SSA_NAME)
3595 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3596 if (!is_gimple_assign (stmt))
3597 return NULL_TREE;
3599 dest = gimple_assign_rhs1 (stmt);
3601 tree_code code = gimple_assign_rhs_code (stmt);
3602 if (code == POINTER_PLUS_EXPR)
3604 /* compute_builtin_object_size fails for addresses with
3605 non-constant offsets. Try to determine the range of
3606 such an offset here and use it to adjust the constant
3607 size. */
3608 tree off = gimple_assign_rhs2 (stmt);
3609 if (TREE_CODE (off) == INTEGER_CST)
3611 if (tree size = compute_objsize (dest, ostype, pdecl))
3613 wide_int wioff = wi::to_wide (off);
3614 wide_int wisiz = wi::to_wide (size);
3616 /* Ignore negative offsets for now. For others,
3617 use the lower bound as the most optimistic
3618 estimate of the (remaining) size. */
3619 if (wi::sign_mask (wioff))
3621 else if (wi::ltu_p (wioff, wisiz))
3622 return wide_int_to_tree (TREE_TYPE (size),
3623 wi::sub (wisiz, wioff));
3624 else
3625 return size_zero_node;
3628 else if (TREE_CODE (off) == SSA_NAME
3629 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3631 wide_int min, max;
3632 enum value_range_kind rng = get_range_info (off, &min, &max);
3634 if (rng == VR_RANGE)
3636 if (tree size = compute_objsize (dest, ostype, pdecl))
3638 wide_int wisiz = wi::to_wide (size);
3640 /* Ignore negative offsets for now. For others,
3641 use the lower bound as the most optimistic
3642 estimate of the (remaining) size. */
3643 if (wi::sign_mask (min)
3644 || wi::sign_mask (max))
3646 else if (wi::ltu_p (min, wisiz))
3647 return wide_int_to_tree (TREE_TYPE (size),
3648 wi::sub (wisiz, min));
3649 else
3650 return size_zero_node;
3655 else if (code != ADDR_EXPR)
3656 return NULL_TREE;
3659 /* Unless computing the largest size (for memcpy and other raw memory
3660 functions), try to determine the size of the object from its type. */
3661 if (!ostype)
3662 return NULL_TREE;
3664 if (TREE_CODE (dest) == ARRAY_REF
3665 || TREE_CODE (dest) == MEM_REF)
3667 tree ref = TREE_OPERAND (dest, 0);
3668 tree off = TREE_OPERAND (dest, 1);
3669 if (tree size = compute_objsize (ref, ostype, pdecl))
3671 /* If the declaration of the destination object is known
3672 to have zero size, return zero. */
3673 if (integer_zerop (size))
3674 return integer_zero_node;
3676 if (TREE_CODE (off) != INTEGER_CST
3677 || TREE_CODE (size) != INTEGER_CST)
3678 return NULL_TREE;
3680 if (TREE_CODE (dest) == ARRAY_REF)
3682 tree eltype = TREE_TYPE (dest);
3683 if (tree tpsize = TYPE_SIZE_UNIT (eltype))
3684 off = fold_build2 (MULT_EXPR, size_type_node, off, tpsize);
3685 else
3686 return NULL_TREE;
3689 if (tree_int_cst_lt (off, size))
3690 return fold_build2 (MINUS_EXPR, size_type_node, size, off);
3691 return integer_zero_node;
3694 return NULL_TREE;
3697 if (TREE_CODE (dest) == COMPONENT_REF)
3699 *pdecl = TREE_OPERAND (dest, 1);
3700 return component_ref_size (dest);
3703 if (TREE_CODE (dest) != ADDR_EXPR)
3704 return NULL_TREE;
3706 tree ref = TREE_OPERAND (dest, 0);
3707 if (DECL_P (ref))
3709 *pdecl = ref;
3710 return DECL_SIZE_UNIT (ref);
3713 tree type = TREE_TYPE (dest);
3714 if (TREE_CODE (type) == POINTER_TYPE)
3715 type = TREE_TYPE (type);
3717 type = TYPE_MAIN_VARIANT (type);
3719 if (TREE_CODE (type) == ARRAY_TYPE
3720 && !array_at_struct_end_p (ref))
3722 if (tree size = TYPE_SIZE_UNIT (type))
3723 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3726 return NULL_TREE;
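/* For illustration, given, say, a char array A of size 8 and a
   pointer P = &A[I] where all that is known about I is that it
   lies in the range [2, 4], the POINTER_PLUS_EXPR handling above
   uses the smallest offset, 2, as the most optimistic estimate
   and returns 8 - 2 = 6.  */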
3729 /* Helper to determine and check the sizes of the source and the destination
3730 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3731 call expression, DEST is the destination argument, SRC is the source
3732 argument or null, and LEN is the number of bytes. Use Object Size type-0
3733 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3734 (no overflow or invalid sizes), false otherwise. */
3736 static bool
3737 check_memop_access (tree exp, tree dest, tree src, tree size)
3739 /* For functions like memset and memcpy that operate on raw memory,
3740 try to determine the size of the largest source and destination
3741 object using type-0 Object Size regardless of the object size
3742 type specified by the option. */
3743 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3744 tree dstsize = compute_objsize (dest, 0);
3746 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3747 srcsize, dstsize);
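/* For illustration, a call such as, say:

     char buf[4];
     memcpy (buf, src, 8);

   yields a type-0 DSTSIZE of 4 and a DSTWRITE of 8, so check_access
   is expected to diagnose writing 8 bytes into a region of size 4
   and return false.  */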
3750 /* Validate memchr arguments without performing any expansion.
3751 Return NULL_RTX. */
3753 static rtx
3754 expand_builtin_memchr (tree exp, rtx)
3756 if (!validate_arglist (exp,
3757 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3758 return NULL_RTX;
3760 tree arg1 = CALL_EXPR_ARG (exp, 0);
3761 tree len = CALL_EXPR_ARG (exp, 2);
3763 /* Diagnose calls where the specified length exceeds the size
3764 of the object. */
3765 if (warn_stringop_overflow)
3767 tree size = compute_objsize (arg1, 0);
3768 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3769 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3772 return NULL_RTX;
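/* For illustration, a call such as, say:

     char a[4];
     memchr (a, c, 8);

   passes the size 4 of A as the source size and the length 8 as
   the number of bytes, so check_access is expected to diagnose
   reading 8 bytes from a region of size 4; the expansion itself
   always falls back to the library call.  */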
3775 /* Expand a call EXP to the memcpy builtin.
3776 Return NULL_RTX if we failed; the caller should emit a normal call.
3777 Otherwise try to get the result in TARGET, if convenient. */
3780 static rtx
3781 expand_builtin_memcpy (tree exp, rtx target)
3783 if (!validate_arglist (exp,
3784 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3785 return NULL_RTX;
3787 tree dest = CALL_EXPR_ARG (exp, 0);
3788 tree src = CALL_EXPR_ARG (exp, 1);
3789 tree len = CALL_EXPR_ARG (exp, 2);
3791 check_memop_access (exp, dest, src, len);
3793 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3794 /*retmode=*/ RETURN_BEGIN, false);
3797 /* Check a call EXP to the memmove built-in for validity.
3798 Return NULL_RTX on both success and failure. */
3800 static rtx
3801 expand_builtin_memmove (tree exp, rtx target)
3803 if (!validate_arglist (exp,
3804 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3805 return NULL_RTX;
3807 tree dest = CALL_EXPR_ARG (exp, 0);
3808 tree src = CALL_EXPR_ARG (exp, 1);
3809 tree len = CALL_EXPR_ARG (exp, 2);
3811 check_memop_access (exp, dest, src, len);
3813 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3814 /*retmode=*/ RETURN_BEGIN, true);
3817 /* Expand a call EXP to the mempcpy builtin.
3818 Return NULL_RTX if we failed; the caller should emit a normal call.
3819 Otherwise try to get the result in TARGET, if convenient. */
3822 static rtx
3823 expand_builtin_mempcpy (tree exp, rtx target)
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
3831 tree len = CALL_EXPR_ARG (exp, 2);
3833 /* Policy does not generally allow using compute_objsize (which
3834 is used internally by check_memop_access) to change code generation
3835 or drive optimization decisions.
3837 In this instance it is safe because the code we generate has
3838 the same semantics regardless of the return value of
3839 check_memop_access. Exactly the same amount of data is copied
3840 and the return value is exactly the same in both cases.
3842 Furthermore, check_memop_access always uses mode 0 for the call to
3843 compute_objsize, so the imprecise nature of compute_objsize is
3844 avoided. */
3846 /* Avoid expanding mempcpy into memcpy when the call is determined
3847 to overflow the buffer. This also prevents the same overflow
3848 from being diagnosed again when expanding memcpy. */
3849 if (!check_memop_access (exp, dest, src, len))
3850 return NULL_RTX;
3852 return expand_builtin_mempcpy_args (dest, src, len,
3853 target, exp, /*retmode=*/ RETURN_END);
3856 /* Helper function to do the actual work for expanding the memory copy
3857 family of functions (memcpy, mempcpy, stpcpy). The expansion should
3858 assign LEN bytes of memory from SRC to DEST and assign to TARGET if
3859 convenient. The return value is based on the RETMODE argument. */
3861 static rtx
3862 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3863 rtx target, tree exp, memop_ret retmode,
3864 bool might_overlap)
3866 const char *src_str;
3867 unsigned int src_align = get_pointer_alignment (src);
3868 unsigned int dest_align = get_pointer_alignment (dest);
3869 rtx dest_mem, src_mem, dest_addr, len_rtx;
3870 HOST_WIDE_INT expected_size = -1;
3871 unsigned int expected_align = 0;
3872 unsigned HOST_WIDE_INT min_size;
3873 unsigned HOST_WIDE_INT max_size;
3874 unsigned HOST_WIDE_INT probable_max_size;
3876 bool is_move_done;
3878 /* If DEST is not a pointer type, call the normal function. */
3879 if (dest_align == 0)
3880 return NULL_RTX;
3882 /* If SRC is not a pointer type, don't do this
3883 operation in-line. */
3884 if (src_align == 0)
3885 return NULL_RTX;
3887 if (currently_expanding_gimple_stmt)
3888 stringop_block_profile (currently_expanding_gimple_stmt,
3889 &expected_align, &expected_size);
3891 if (expected_align < dest_align)
3892 expected_align = dest_align;
3893 dest_mem = get_memory_rtx (dest, len);
3894 set_mem_align (dest_mem, dest_align);
3895 len_rtx = expand_normal (len);
3896 determine_block_size (len, len_rtx, &min_size, &max_size,
3897 &probable_max_size);
3898 src_str = c_getstr (src);
3900 /* If SRC is a string constant and block move would be done by
3901 pieces, we can avoid loading the string from memory and only
3902 store the computed constants. This works in the overlap
3903 (memmove) case as well because store_by_pieces just generates a
3904 series of stores of constants from the string constant returned
3905 by c_getstr(). */
3906 if (src_str
3907 && CONST_INT_P (len_rtx)
3908 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3909 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3910 CONST_CAST (char *, src_str),
3911 dest_align, false))
3913 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3914 builtin_memcpy_read_str,
3915 CONST_CAST (char *, src_str),
3916 dest_align, false, retmode);
3917 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3918 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3919 return dest_mem;
3922 src_mem = get_memory_rtx (src, len);
3923 set_mem_align (src_mem, src_align);
3925 /* Copy word part most expediently. */
3926 enum block_op_methods method = BLOCK_OP_NORMAL;
3927 if (CALL_EXPR_TAILCALL (exp)
3928 && (retmode == RETURN_BEGIN || target == const0_rtx))
3929 method = BLOCK_OP_TAILCALL;
3930 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3931 && retmode == RETURN_END
3932 && !might_overlap
3933 && target != const0_rtx);
3934 if (use_mempcpy_call)
3935 method = BLOCK_OP_NO_LIBCALL_RET;
3936 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3937 expected_align, expected_size,
3938 min_size, max_size, probable_max_size,
3939 use_mempcpy_call, &is_move_done, might_overlap);
3941 /* Bail out when a mempcpy call would be expanded as a libcall and
3942 the target provides a fast implementation
3943 of the mempcpy routine. */
3944 if (!is_move_done)
3945 return NULL_RTX;
3947 if (dest_addr == pc_rtx)
3948 return NULL_RTX;
3950 if (dest_addr == 0)
3952 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3953 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3956 if (retmode != RETURN_BEGIN && target != const0_rtx)
3958 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3959 /* stpcpy returns a pointer to the last byte, not one past it. */
3960 if (retmode == RETURN_END_MINUS_ONE)
3961 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3964 return dest_addr;
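/* For illustration, a call such as, say:

     memcpy (d, "abc", 4);

   satisfies the c_getstr/can_store_by_pieces test above, so the
   expansion stores the four constant bytes 'a', 'b', 'c', '\0'
   directly instead of loading the string from memory.  */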
3967 static rtx
3968 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3969 rtx target, tree orig_exp, memop_ret retmode)
3971 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3972 retmode, false);
3975 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3976 we failed, the caller should emit a normal call, otherwise try to
3977 get the result in TARGET, if convenient.
3978 Return value is based on RETMODE argument. */
3980 static rtx
3981 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3983 class expand_operand ops[3];
3984 rtx dest_mem;
3985 rtx src_mem;
3987 if (!targetm.have_movstr ())
3988 return NULL_RTX;
3990 dest_mem = get_memory_rtx (dest, NULL);
3991 src_mem = get_memory_rtx (src, NULL);
3992 if (retmode == RETURN_BEGIN)
3994 target = force_reg (Pmode, XEXP (dest_mem, 0));
3995 dest_mem = replace_equiv_address (dest_mem, target);
3998 create_output_operand (&ops[0],
3999 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4000 create_fixed_operand (&ops[1], dest_mem);
4001 create_fixed_operand (&ops[2], src_mem);
4002 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4003 return NULL_RTX;
4005 if (retmode != RETURN_BEGIN && target != const0_rtx)
4007 target = ops[0].value;
4008 /* movstr is supposed to set end to the address of the NUL
4009 terminator. If the caller requested a mempcpy-like return value,
4010 adjust it. */
4011 if (retmode == RETURN_END)
4013 rtx tem = plus_constant (GET_MODE (target),
4014 gen_lowpart (GET_MODE (target), target), 1);
4015 emit_move_insn (target, force_operand (tem, NULL_RTX));
4018 return target;
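/* Worked example of the adjustment above: after copying, say, "ab",
   movstr leaves TARGET at &dst[2], the nul.  A mempcpy-style result
   (RETURN_END) is &dst[3], hence the plus_constant of 1, while
   stpcpy (RETURN_END_MINUS_ONE) wants the nul's address and takes
   TARGET unchanged.  */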
4021 /* Do some very basic size validation of a call to the strcat builtin
4022 given by EXP. Return NULL_RTX to have the built-in expand to a call
4023 to the library function. */
4025 static rtx
4026 expand_builtin_strcat (tree exp, rtx)
4028 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4029 || !warn_stringop_overflow)
4030 return NULL_RTX;
4032 tree dest = CALL_EXPR_ARG (exp, 0);
4033 tree src = CALL_EXPR_ARG (exp, 1);
4035 /* There is no way here to determine the length of the string in
4036 the destination to which the SRC string is being appended, so
4037 just diagnose cases when the source string is longer than
4038 the destination object. */
4040 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4042 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4043 destsize);
4045 return NULL_RTX;
4048 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4049 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4050 try to get the result in TARGET, if convenient. */
4053 static rtx
4054 expand_builtin_strcpy (tree exp, rtx target)
4056 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4057 return NULL_RTX;
4059 tree dest = CALL_EXPR_ARG (exp, 0);
4060 tree src = CALL_EXPR_ARG (exp, 1);
4062 if (warn_stringop_overflow)
4064 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4065 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4066 src, destsize);
4069 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4071 /* Check to see if the argument was declared attribute nonstring
4072 and if so, issue a warning since at this point it's not known
4073 to be nul-terminated. */
4074 tree fndecl = get_callee_fndecl (exp);
4075 maybe_warn_nonstring_arg (fndecl, exp);
4076 return ret;
4079 return NULL_RTX;
4082 /* Helper function to do the actual work for expand_builtin_strcpy. The
4083 arguments to the builtin_strcpy call DEST and SRC are broken out
4084 so that this can also be called without constructing an actual CALL_EXPR.
4085 The other arguments and return value are the same as for
4086 expand_builtin_strcpy. */
4088 static rtx
4089 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4091 /* Detect strcpy calls with unterminated arrays. */
4092 if (tree nonstr = unterminated_array (src))
4094 /* NONSTR refers to the non-nul terminated constant array. */
4095 if (!TREE_NO_WARNING (exp))
4096 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4097 return NULL_RTX;
4100 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4103 /* Expand a call EXP to the stpcpy builtin.
4104 Return NULL_RTX if we failed; the caller should emit a normal call.
4105 Otherwise try to get the result in TARGET, if convenient (and in
4106 mode MODE if that's convenient). */
4108 static rtx
4109 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4111 tree dst, src;
4112 location_t loc = EXPR_LOCATION (exp);
4114 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4115 return NULL_RTX;
4117 dst = CALL_EXPR_ARG (exp, 0);
4118 src = CALL_EXPR_ARG (exp, 1);
4120 if (warn_stringop_overflow)
4122 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4123 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4124 src, destsize);
4127 /* If return value is ignored, transform stpcpy into strcpy. */
4128 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4130 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4131 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4132 return expand_expr (result, target, mode, EXPAND_NORMAL);
4134 else
4136 tree len, lenp1;
4137 rtx ret;
4139 /* Ensure we get an actual string whose length can be evaluated at
4140 compile-time, not an expression containing a string. This is
4141 because the latter will potentially produce pessimized code
4142 when used to produce the return value. */
4143 c_strlen_data lendata = { };
4144 if (!c_getstr (src, NULL)
4145 || !(len = c_strlen (src, 0, &lendata, 1)))
4146 return expand_movstr (dst, src, target,
4147 /*retmode=*/ RETURN_END_MINUS_ONE);
4149 if (lendata.decl && !TREE_NO_WARNING (exp))
4150 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4152 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4153 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4154 target, exp,
4155 /*retmode=*/ RETURN_END_MINUS_ONE);
4157 if (ret)
4158 return ret;
4160 if (TREE_CODE (len) == INTEGER_CST)
4162 rtx len_rtx = expand_normal (len);
4164 if (CONST_INT_P (len_rtx))
4166 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4168 if (ret)
4170 if (! target)
4172 if (mode != VOIDmode)
4173 target = gen_reg_rtx (mode);
4174 else
4175 target = gen_reg_rtx (GET_MODE (ret));
4177 if (GET_MODE (target) != GET_MODE (ret))
4178 ret = gen_lowpart (GET_MODE (target), ret);
4180 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4181 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4182 gcc_assert (ret);
4184 return target;
4189 return expand_movstr (dst, src, target,
4190 /*retmode=*/ RETURN_END_MINUS_ONE);
4194 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4195 arguments while being careful to avoid duplicate warnings (which could
4196 be issued if the expander were to expand the call, resulting in it
4197 being emitted in expand_call()). */
4199 static rtx
4200 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4202 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4204 /* The call has been successfully expanded. Check for nonstring
4205 arguments and issue warnings as appropriate. */
4206 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4207 return ret;
4210 return NULL_RTX;
4213 /* Check a call EXP to the stpncpy built-in for validity.
4214 Return NULL_RTX on both success and failure. */
4216 static rtx
4217 expand_builtin_stpncpy (tree exp, rtx)
4219 if (!validate_arglist (exp,
4220 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4221 || !warn_stringop_overflow)
4222 return NULL_RTX;
4224 /* The source and destination of the call. */
4225 tree dest = CALL_EXPR_ARG (exp, 0);
4226 tree src = CALL_EXPR_ARG (exp, 1);
4228 /* The exact number of bytes to write (not the maximum). */
4229 tree len = CALL_EXPR_ARG (exp, 2);
4231 /* The size of the destination object. */
4232 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4234 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4236 return NULL_RTX;
4239 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4240 bytes from constant string DATA + OFFSET and return it as target
4241 constant. */
4243 rtx
4244 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4245 scalar_int_mode mode)
4247 const char *str = (const char *) data;
4249 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4250 return const0_rtx;
4252 return c_readstr (str + offset, mode);
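/* For illustration, with DATA being, say, "ab" and a 4-byte MODE,
   offset 0 yields the constant for the bytes 'a', 'b', '\0', '\0'
   (c_readstr pads with zeros past the nul), and any offset greater
   than strlen ("ab") yields const0_rtx, producing the zero padding
   that strncpy requires.  */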
4255 /* Helper to check the sizes of sequences and the destination of calls
4256 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4257 success (no overflow or invalid sizes), false otherwise. */
4259 static bool
4260 check_strncat_sizes (tree exp, tree objsize)
4262 tree dest = CALL_EXPR_ARG (exp, 0);
4263 tree src = CALL_EXPR_ARG (exp, 1);
4264 tree maxread = CALL_EXPR_ARG (exp, 2);
4266 /* Try to determine the range of lengths that the source expression
4267 refers to. */
4268 c_strlen_data lendata = { };
4269 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4271 /* Try to verify that the destination is big enough for the shortest
4272 string. */
4274 if (!objsize && warn_stringop_overflow)
4276 /* If it hasn't been provided by __strncat_chk, try to determine
4277 the size of the destination object into which the source is
4278 being copied. */
4279 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4282 /* Add one for the terminating nul. */
4283 tree srclen = (lendata.minlen
4284 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4285 size_one_node)
4286 : NULL_TREE);
4288 /* The strncat function copies at most MAXREAD bytes and always appends
4289 the terminating nul, so the specified upper bound should never be equal
4290 to (or greater than) the size of the destination. */
4291 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4292 && tree_int_cst_equal (objsize, maxread))
4294 location_t loc = tree_nonartificial_location (exp);
4295 loc = expansion_point_location_if_in_system_header (loc);
4297 warning_at (loc, OPT_Wstringop_overflow_,
4298 "%K%qD specified bound %E equals destination size",
4299 exp, get_callee_fndecl (exp), maxread);
4301 return false;
4304 if (!srclen
4305 || (maxread && tree_fits_uhwi_p (maxread)
4306 && tree_fits_uhwi_p (srclen)
4307 && tree_int_cst_lt (maxread, srclen)))
4308 srclen = maxread;
4310 /* The number of bytes to write is LEN but check_access will also
4311 check SRCLEN if LEN's value isn't known. */
4312 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4313 objsize);
4316 /* Similar to expand_builtin_strcat, do some very basic size validation
4317 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4318 the built-in expand to a call to the library function. */
4320 static rtx
4321 expand_builtin_strncat (tree exp, rtx)
4323 if (!validate_arglist (exp,
4324 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4325 || !warn_stringop_overflow)
4326 return NULL_RTX;
4328 tree dest = CALL_EXPR_ARG (exp, 0);
4329 tree src = CALL_EXPR_ARG (exp, 1);
4330 /* The upper bound on the number of bytes to write. */
4331 tree maxread = CALL_EXPR_ARG (exp, 2);
4332 /* The length of the source sequence. */
4333 tree slen = c_strlen (src, 1);
4335 /* Try to determine the range of lengths that the source expression
4336 refers to. Since the lengths are only used for warning and not
4337 for code generation, disable strict mode below. */
4338 tree maxlen = slen;
4339 if (!maxlen)
4341 c_strlen_data lendata = { };
4342 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4343 maxlen = lendata.maxbound;
4346 /* Try to verify that the destination is big enough for the shortest
4347 string. First try to determine the size of the destination object
4348 into which the source is being copied. */
4349 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4351 /* Add one for the terminating nul. */
4352 tree srclen = (maxlen
4353 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4354 size_one_node)
4355 : NULL_TREE);
4357 /* The strncat function copies at most MAXREAD bytes and always appends
4358 the terminating nul, so the specified upper bound should never be equal
4359 to (or greater than) the size of the destination. */
4360 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4361 && tree_int_cst_equal (destsize, maxread))
4363 location_t loc = tree_nonartificial_location (exp);
4364 loc = expansion_point_location_if_in_system_header (loc);
4366 warning_at (loc, OPT_Wstringop_overflow_,
4367 "%K%qD specified bound %E equals destination size",
4368 exp, get_callee_fndecl (exp), maxread);
4370 return NULL_RTX;
4373 if (!srclen
4374 || (maxread && tree_fits_uhwi_p (maxread)
4375 && tree_fits_uhwi_p (srclen)
4376 && tree_int_cst_lt (maxread, srclen)))
4377 srclen = maxread;
4379 /* The number of bytes to write is SRCLEN. */
4380 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4382 return NULL_RTX;
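/* For illustration, the case diagnosed above is, say:

     char d[8];
     strncat (d, s, sizeof d);

   Because strncat appends a nul after at most MAXREAD bytes, a
   bound equal to the destination size can write one byte past the
   end, hence the "specified bound equals destination size"
   warning.  */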
4385 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4386 NULL_RTX if we failed; the caller should emit a normal call. */
4388 static rtx
4389 expand_builtin_strncpy (tree exp, rtx target)
4391 location_t loc = EXPR_LOCATION (exp);
4393 if (validate_arglist (exp,
4394 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4396 tree dest = CALL_EXPR_ARG (exp, 0);
4397 tree src = CALL_EXPR_ARG (exp, 1);
4398 /* The number of bytes to write (not the maximum). */
4399 tree len = CALL_EXPR_ARG (exp, 2);
4400 /* The length of the source sequence. */
4401 tree slen = c_strlen (src, 1);
4403 if (warn_stringop_overflow)
4405 tree destsize = compute_objsize (dest,
4406 warn_stringop_overflow - 1);
4408 /* The number of bytes to write is LEN but check_access will also
4409 check SLEN if LEN's value isn't known. */
4410 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4411 destsize);
4414 /* We must be passed a constant len and src parameter. */
4415 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4416 return NULL_RTX;
4418 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4420 /* We're required to pad with trailing zeros if the requested
4421 len is greater than strlen(s2)+1. In that case try to
4422 use store_by_pieces; if it fails, punt. */
4423 if (tree_int_cst_lt (slen, len))
4425 unsigned int dest_align = get_pointer_alignment (dest);
4426 const char *p = c_getstr (src);
4427 rtx dest_mem;
4429 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4430 || !can_store_by_pieces (tree_to_uhwi (len),
4431 builtin_strncpy_read_str,
4432 CONST_CAST (char *, p),
4433 dest_align, false))
4434 return NULL_RTX;
4436 dest_mem = get_memory_rtx (dest, len);
4437 store_by_pieces (dest_mem, tree_to_uhwi (len),
4438 builtin_strncpy_read_str,
4439 CONST_CAST (char *, p), dest_align, false,
4440 RETURN_BEGIN);
4441 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4442 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4443 return dest_mem;
4446 return NULL_RTX;
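/* For illustration, a call such as, say:

     char d[5];
     strncpy (d, "ab", 5);

   has SLEN = 3 < LEN = 5, so the store_by_pieces path above emits
   the constant bytes 'a', 'b' followed by three nuls, providing
   the required zero padding without a library call.  */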
4449 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4450 bytes from constant string DATA + OFFSET and return it as target
4451 constant. */
4453 rtx
4454 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4455 scalar_int_mode mode)
4457 const char *c = (const char *) data;
4458 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4460 memset (p, *c, GET_MODE_SIZE (mode));
4462 return c_readstr (p, mode);
4465 /* Callback routine for store_by_pieces. Return the RTL of a register
4466 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4467 char value given in the RTL register data. For example, if mode is
4468 4 bytes wide, return the RTL for 0x01010101*data. */
4470 static rtx
4471 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4472 scalar_int_mode mode)
4474 rtx target, coeff;
4475 size_t size;
4476 char *p;
4478 size = GET_MODE_SIZE (mode);
4479 if (size == 1)
4480 return (rtx) data;
4482 p = XALLOCAVEC (char, size);
4483 memset (p, 1, size);
4484 coeff = c_readstr (p, mode);
4486 target = convert_to_mode (mode, (rtx) data, 1);
4487 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4488 return force_reg (mode, target);
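/* Worked example for the comment above: in a 4-byte mode with DATA
   holding, say, the value 0x2a, COEFF is read from the bytes
   { 1, 1, 1, 1 }, i.e. 0x01010101, and the returned register holds
   0x2a * 0x01010101 = 0x2a2a2a2a, one copy of the byte in each
   position.  */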
4491 /* Expand expression EXP, which is a call to the memset builtin. Return
4492 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4493 try to get the result in TARGET, if convenient (and in mode MODE if that's
4494 convenient). */
4496 static rtx
4497 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4499 if (!validate_arglist (exp,
4500 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4501 return NULL_RTX;
4503 tree dest = CALL_EXPR_ARG (exp, 0);
4504 tree val = CALL_EXPR_ARG (exp, 1);
4505 tree len = CALL_EXPR_ARG (exp, 2);
4507 check_memop_access (exp, dest, NULL_TREE, len);
4509 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4512 /* Helper function to do the actual work for expand_builtin_memset. The
4513 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4514 so that this can also be called without constructing an actual CALL_EXPR.
4515 The other arguments and return value are the same as for
4516 expand_builtin_memset. */
4518 static rtx
4519 expand_builtin_memset_args (tree dest, tree val, tree len,
4520 rtx target, machine_mode mode, tree orig_exp)
4522 tree fndecl, fn;
4523 enum built_in_function fcode;
4524 machine_mode val_mode;
4525 char c;
4526 unsigned int dest_align;
4527 rtx dest_mem, dest_addr, len_rtx;
4528 HOST_WIDE_INT expected_size = -1;
4529 unsigned int expected_align = 0;
4530 unsigned HOST_WIDE_INT min_size;
4531 unsigned HOST_WIDE_INT max_size;
4532 unsigned HOST_WIDE_INT probable_max_size;
4534 dest_align = get_pointer_alignment (dest);
4536 /* If DEST is not a pointer type, don't do this operation in-line. */
4537 if (dest_align == 0)
4538 return NULL_RTX;
4540 if (currently_expanding_gimple_stmt)
4541 stringop_block_profile (currently_expanding_gimple_stmt,
4542 &expected_align, &expected_size);
4544 if (expected_align < dest_align)
4545 expected_align = dest_align;
4547 /* If the LEN parameter is zero, return DEST. */
4548 if (integer_zerop (len))
4550 /* Evaluate and ignore VAL in case it has side-effects. */
4551 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4552 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4555 /* Stabilize the arguments in case we fail. */
4556 dest = builtin_save_expr (dest);
4557 val = builtin_save_expr (val);
4558 len = builtin_save_expr (len);
4560 len_rtx = expand_normal (len);
4561 determine_block_size (len, len_rtx, &min_size, &max_size,
4562 &probable_max_size);
4563 dest_mem = get_memory_rtx (dest, len);
4564 val_mode = TYPE_MODE (unsigned_char_type_node);
4566 if (TREE_CODE (val) != INTEGER_CST)
4568 rtx val_rtx;
4570 val_rtx = expand_normal (val);
4571 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4573 /* Assume that we can memset by pieces if we can store
4574 the coefficients by pieces (in the required modes).
4575 We can't pass builtin_memset_gen_str as that emits RTL. */
4576 c = 1;
4577 if (tree_fits_uhwi_p (len)
4578 && can_store_by_pieces (tree_to_uhwi (len),
4579 builtin_memset_read_str, &c, dest_align,
4580 true))
4582 val_rtx = force_reg (val_mode, val_rtx);
4583 store_by_pieces (dest_mem, tree_to_uhwi (len),
4584 builtin_memset_gen_str, val_rtx, dest_align,
4585 true, RETURN_BEGIN);
4587 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4588 dest_align, expected_align,
4589 expected_size, min_size, max_size,
4590 probable_max_size))
4591 goto do_libcall;
4593 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4594 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4595 return dest_mem;
4598 if (target_char_cast (val, &c))
4599 goto do_libcall;
4601 if (c)
4603 if (tree_fits_uhwi_p (len)
4604 && can_store_by_pieces (tree_to_uhwi (len),
4605 builtin_memset_read_str, &c, dest_align,
4606 true))
4607 store_by_pieces (dest_mem, tree_to_uhwi (len),
4608 builtin_memset_read_str, &c, dest_align, true,
4609 RETURN_BEGIN);
4610 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4611 gen_int_mode (c, val_mode),
4612 dest_align, expected_align,
4613 expected_size, min_size, max_size,
4614 probable_max_size))
4615 goto do_libcall;
4617 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4618 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4619 return dest_mem;
4622 set_mem_align (dest_mem, dest_align);
4623 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4624 CALL_EXPR_TAILCALL (orig_exp)
4625 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4626 expected_align, expected_size,
4627 min_size, max_size,
4628 probable_max_size);
4630 if (dest_addr == 0)
4632 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4633 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4636 return dest_addr;
4638 do_libcall:
4639 fndecl = get_callee_fndecl (orig_exp);
4640 fcode = DECL_FUNCTION_CODE (fndecl);
4641 if (fcode == BUILT_IN_MEMSET)
4642 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4643 dest, val, len);
4644 else if (fcode == BUILT_IN_BZERO)
4645 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4646 dest, len);
4647 else
4648 gcc_unreachable ();
4649 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4650 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4651 return expand_call (fn, target, target == const0_rtx);
4654 /* Expand expression EXP, which is a call to the bzero builtin. Return
4655 NULL_RTX if we failed; the caller should emit a normal call. */
4657 static rtx
4658 expand_builtin_bzero (tree exp)
4660 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4661 return NULL_RTX;
4663 tree dest = CALL_EXPR_ARG (exp, 0);
4664 tree size = CALL_EXPR_ARG (exp, 1);
4666 check_memop_access (exp, dest, NULL_TREE, size);
4668 /* New argument list transforming bzero(ptr x, int y) to
4669 memset(ptr x, int 0, size_t y). This is done this way
4670 so that if it isn't expanded inline, we fall back to
4671 calling bzero instead of memset. */
4673 location_t loc = EXPR_LOCATION (exp);
4675 return expand_builtin_memset_args (dest, integer_zero_node,
4676 fold_convert_loc (loc,
4677 size_type_node, size),
4678 const0_rtx, VOIDmode, exp);
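/* For illustration, the transformation above expands, say:

     bzero (p, n);

   as if it were memset (p, 0, (size_t) n), while EXP still names
   bzero, so a library fallback emitted by the memset expander
   calls bzero rather than memset.  */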
4681 /* Try to expand cmpstr operation ICODE with the given operands.
4682 Return the result rtx on success, otherwise return null. */
4684 static rtx
4685 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4686 HOST_WIDE_INT align)
4688 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4690 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4691 target = NULL_RTX;
4693 class expand_operand ops[4];
4694 create_output_operand (&ops[0], target, insn_mode);
4695 create_fixed_operand (&ops[1], arg1_rtx);
4696 create_fixed_operand (&ops[2], arg2_rtx);
4697 create_integer_operand (&ops[3], align);
4698 if (maybe_expand_insn (icode, 4, ops))
4699 return ops[0].value;
4700 return NULL_RTX;
4703 /* Expand expression EXP, which is a call to the memcmp built-in function.
4704 Return NULL_RTX if we failed and the caller should emit a normal call,
4705 otherwise try to get the result in TARGET, if convenient.
4706 RESULT_EQ is true if we can relax the returned value to be either zero
4707 or nonzero, without caring about the sign. */
4709 static rtx
4710 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4712 if (!validate_arglist (exp,
4713 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4714 return NULL_RTX;
4716 tree arg1 = CALL_EXPR_ARG (exp, 0);
4717 tree arg2 = CALL_EXPR_ARG (exp, 1);
4718 tree len = CALL_EXPR_ARG (exp, 2);
4719 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4720 bool no_overflow = true;
4722 /* Diagnose calls where the specified length exceeds the size of either
4723 object. */
4724 tree size = compute_objsize (arg1, 0);
4725 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4726 len, /*maxread=*/NULL_TREE, size,
4727 /*objsize=*/NULL_TREE);
4728 if (no_overflow)
4730 size = compute_objsize (arg2, 0);
4731 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4732 len, /*maxread=*/NULL_TREE, size,
4733 /*objsize=*/NULL_TREE);
4736 /* If the specified length exceeds the size of either object,
4737 call the function. */
4738 if (!no_overflow)
4739 return NULL_RTX;
4741 /* Due to the performance benefit, always inline the calls first
4742 when result_eq is false. */
4743 rtx result = NULL_RTX;
4745 if (!result_eq && fcode != BUILT_IN_BCMP)
4747 result = inline_expand_builtin_string_cmp (exp, target);
4748 if (result)
4749 return result;
4752 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4753 location_t loc = EXPR_LOCATION (exp);
4755 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4756 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4758 /* If we don't have POINTER_TYPE, call the function. */
4759 if (arg1_align == 0 || arg2_align == 0)
4760 return NULL_RTX;
4762 rtx arg1_rtx = get_memory_rtx (arg1, len);
4763 rtx arg2_rtx = get_memory_rtx (arg2, len);
4764 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4766 /* Set MEM_SIZE as appropriate. */
4767 if (CONST_INT_P (len_rtx))
4769 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4770 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4773 by_pieces_constfn constfn = NULL;
4775 const char *src_str = c_getstr (arg2);
4776 if (result_eq && src_str == NULL)
4778 src_str = c_getstr (arg1);
4779 if (src_str != NULL)
4780 std::swap (arg1_rtx, arg2_rtx);
4783 /* If SRC is a string constant and block move would be done
4784 by pieces, we can avoid loading the string from memory
4785 and only store the computed constants. */
4786 if (src_str
4787 && CONST_INT_P (len_rtx)
4788 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4789 constfn = builtin_memcpy_read_str;
4791 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4792 TREE_TYPE (len), target,
4793 result_eq, constfn,
4794 CONST_CAST (char *, src_str));
4796 if (result)
4798 /* Return the value in the proper mode for this function. */
4799 if (GET_MODE (result) == mode)
4800 return result;
4802 if (target != 0)
4804 convert_move (target, result, 0);
4805 return target;
4808 return convert_to_mode (mode, result, 0);
4811 return NULL_RTX;
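/* For illustration, in a use such as, say:

     if (memcmp (a, b, n) == 0)

   only the zero/nonzero outcome matters, so RESULT_EQ is true and
   emit_block_cmp_hints is free to use a cheaper equality-only
   sequence instead of computing the ordered sign result.  */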
4814 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4815 if we failed; the caller should emit a normal call. Otherwise try to get
4816 the result in TARGET, if convenient. */
4818 static rtx
4819 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4821 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4822 return NULL_RTX;
4824 /* Due to the performance benefit, always inline the calls first. */
4825 rtx result = NULL_RTX;
4826 result = inline_expand_builtin_string_cmp (exp, target);
4827 if (result)
4828 return result;
4830 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4831 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4832 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4833 return NULL_RTX;
4835 tree arg1 = CALL_EXPR_ARG (exp, 0);
4836 tree arg2 = CALL_EXPR_ARG (exp, 1);
4838 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4839 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4841 /* If we don't have POINTER_TYPE, call the function. */
4842 if (arg1_align == 0 || arg2_align == 0)
4843 return NULL_RTX;
4845 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4846 arg1 = builtin_save_expr (arg1);
4847 arg2 = builtin_save_expr (arg2);
4849 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4850 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4852 /* Try to call cmpstrsi. */
4853 if (cmpstr_icode != CODE_FOR_nothing)
4854 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4855 MIN (arg1_align, arg2_align));
4857 /* Try to determine at least one length and call cmpstrnsi. */
4858 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4860 tree len;
4861 rtx arg3_rtx;
4863 tree len1 = c_strlen (arg1, 1);
4864 tree len2 = c_strlen (arg2, 1);
4866 if (len1)
4867 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4868 if (len2)
4869 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4871 /* If we don't have a constant length for the first, use the length
4872 of the second, if we know it. We don't require a constant for
4873 this case; some cost analysis could be done if both are available
4874 but neither is constant. For now, assume they're equally cheap,
4875 unless one has side effects. If both strings have constant lengths,
4876 use the smaller. */
4878 if (!len1)
4879 len = len2;
4880 else if (!len2)
4881 len = len1;
4882 else if (TREE_SIDE_EFFECTS (len1))
4883 len = len2;
4884 else if (TREE_SIDE_EFFECTS (len2))
4885 len = len1;
4886 else if (TREE_CODE (len1) != INTEGER_CST)
4887 len = len2;
4888 else if (TREE_CODE (len2) != INTEGER_CST)
4889 len = len1;
4890 else if (tree_int_cst_lt (len1, len2))
4891 len = len1;
4892 else
4893 len = len2;
4895 /* If both arguments have side effects, we cannot optimize. */
4896 if (len && !TREE_SIDE_EFFECTS (len))
4898 arg3_rtx = expand_normal (len);
4899 result = expand_cmpstrn_or_cmpmem
4900 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4901 arg3_rtx, MIN (arg1_align, arg2_align));
4905 tree fndecl = get_callee_fndecl (exp);
4906 if (result)
4908 /* Check to see if the argument was declared attribute nonstring
4909 and if so, issue a warning since at this point it's not known
4910 to be nul-terminated. */
4911 maybe_warn_nonstring_arg (fndecl, exp);
4913 /* Return the value in the proper mode for this function. */
4914 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4915 if (GET_MODE (result) == mode)
4916 return result;
4917 if (target == 0)
4918 return convert_to_mode (mode, result, 0);
4919 convert_move (target, result, 0);
4920 return target;
4923 /* Expand the library call ourselves using a stabilized argument
4924 list to avoid evaluating the function's arguments twice. */
4925 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4926 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4927 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4928 return expand_call (fn, target, target == const0_rtx);
4931 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4932 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4933 the result in TARGET, if convenient. */
4935 static rtx
4936 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4937 ATTRIBUTE_UNUSED machine_mode mode)
4939 if (!validate_arglist (exp,
4940 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4941 return NULL_RTX;
4943 /* Due to the performance benefit, always inline the calls first. */
4944 rtx result = NULL_RTX;
4945 result = inline_expand_builtin_string_cmp (exp, target);
4946 if (result)
4947 return result;
4949 /* If c_strlen can determine an expression for one of the string
4950 lengths, and it doesn't have side effects, then emit cmpstrnsi
4951 using length MIN(strlen(string)+1, arg3). */
4952 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4953 if (cmpstrn_icode == CODE_FOR_nothing)
4954 return NULL_RTX;
4956 tree len;
4958 tree arg1 = CALL_EXPR_ARG (exp, 0);
4959 tree arg2 = CALL_EXPR_ARG (exp, 1);
4960 tree arg3 = CALL_EXPR_ARG (exp, 2);
4962 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4963 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4965 tree len1 = c_strlen (arg1, 1);
4966 tree len2 = c_strlen (arg2, 1);
4968 location_t loc = EXPR_LOCATION (exp);
4970 if (len1)
4971 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4972 if (len2)
4973 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4975 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4977 /* If we don't have a constant length for the first, use the length
4978 of the second, if we know it. If neither string is constant length,
4979 use the given length argument. We don't require a constant for
4980 this case; some cost analysis could be done if both are available
4981 but neither is constant. For now, assume they're equally cheap,
4982 unless one has side effects. If both strings have constant lengths,
4983 use the smaller. */
4985 if (!len1 && !len2)
4986 len = len3;
4987 else if (!len1)
4988 len = len2;
4989 else if (!len2)
4990 len = len1;
4991 else if (TREE_SIDE_EFFECTS (len1))
4992 len = len2;
4993 else if (TREE_SIDE_EFFECTS (len2))
4994 len = len1;
4995 else if (TREE_CODE (len1) != INTEGER_CST)
4996 len = len2;
4997 else if (TREE_CODE (len2) != INTEGER_CST)
4998 len = len1;
4999 else if (tree_int_cst_lt (len1, len2))
5000 len = len1;
5001 else
5002 len = len2;
5004 /* If we are not using the given length, we must incorporate it here.
5005 The actual new length parameter will be MIN(len,arg3) in this case. */
5006 if (len != len3)
5008 len = fold_convert_loc (loc, sizetype, len);
5009 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5011 rtx arg1_rtx = get_memory_rtx (arg1, len);
5012 rtx arg2_rtx = get_memory_rtx (arg2, len);
5013 rtx arg3_rtx = expand_normal (len);
5014 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5015 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5016 MIN (arg1_align, arg2_align));
5018 tree fndecl = get_callee_fndecl (exp);
5019 if (result)
5021 /* Check to see if the argument was declared attribute nonstring
5022 and if so, issue a warning since at this point it's not known
5023 to be nul-terminated. */
5024 maybe_warn_nonstring_arg (fndecl, exp);
5026 /* Return the value in the proper mode for this function. */
5027 mode = TYPE_MODE (TREE_TYPE (exp));
5028 if (GET_MODE (result) == mode)
5029 return result;
5030 if (target == 0)
5031 return convert_to_mode (mode, result, 0);
5032 convert_move (target, result, 0);
5033 return target;
5036 /* Expand the library call ourselves using a stabilized argument
5037 list to avoid evaluating the function's arguments twice. */
5038 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5041 return expand_call (fn, target, target == const0_rtx);
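/* Worked example of the length selection above: for, say,

     strncmp (s, "abc", 10);

   LEN1 is unknown, LEN2 is 3 + 1 = 4 and LEN3 is 10, so LEN2 is
   chosen; since it differs from LEN3, the emitted cmpstrnsi length
   becomes MIN (4, 10) = 4.  */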
5044 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5045 if that's convenient. */
5047 rtx
5048 expand_builtin_saveregs (void)
5050 rtx val;
5051 rtx_insn *seq;
5053 /* Don't do __builtin_saveregs more than once in a function.
5054 Save the result of the first call and reuse it. */
5055 if (saveregs_value != 0)
5056 return saveregs_value;
5058 /* When this function is called, it means that registers must be
5059 saved on entry to this function. So we migrate the call to the
5060 first insn of this function. */
5062 start_sequence ();
5064 /* Do whatever the machine needs done in this case. */
5065 val = targetm.calls.expand_builtin_saveregs ();
5067 seq = get_insns ();
5068 end_sequence ();
5070 saveregs_value = val;
5072 /* Put the insns after the NOTE that starts the function. If this
5073 is inside a start_sequence, make the outer-level insn chain current, so
5074 the code is placed at the start of the function. */
5075 push_topmost_sequence ();
5076 emit_insn_after (seq, entry_of_function ());
5077 pop_topmost_sequence ();
5079 return val;
5082 /* Expand a call to __builtin_next_arg. */
5084 static rtx
5085 expand_builtin_next_arg (void)
5087 /* Checking arguments is already done in fold_builtin_next_arg
5088 which must be called before this function. */
5089 return expand_binop (ptr_mode, add_optab,
5090 crtl->args.internal_arg_pointer,
5091 crtl->args.arg_offset_rtx,
5092 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5095 /* Make it easier for the backends by protecting the valist argument
5096 from multiple evaluations. */
5098 static tree
5099 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5101 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5103 /* The current way of determining the type of valist is completely
5104 bogus. We should have the information on the va builtin instead. */
5105 if (!vatype)
5106 vatype = targetm.fn_abi_va_list (cfun->decl);
5108 if (TREE_CODE (vatype) == ARRAY_TYPE)
5110 if (TREE_SIDE_EFFECTS (valist))
5111 valist = save_expr (valist);
5113 /* For this case, the backends will be expecting a pointer to
5114 vatype, but it's possible we've actually been given an array
5115 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5116 So fix it. */
5117 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5119 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5120 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5123 else
5125 tree pt = build_pointer_type (vatype);
5127 if (! needs_lvalue)
5129 if (! TREE_SIDE_EFFECTS (valist))
5130 return valist;
5132 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5133 TREE_SIDE_EFFECTS (valist) = 1;
5136 if (TREE_SIDE_EFFECTS (valist))
5137 valist = save_expr (valist);
5138 valist = fold_build2_loc (loc, MEM_REF,
5139 vatype, valist, build_int_cst (pt, 0));
5142 return valist;
5145 /* The "standard" definition of va_list is void*. */
5147 tree
5148 std_build_builtin_va_list (void)
5150 return ptr_type_node;
5153 /* The "standard" abi va_list is va_list_type_node. */
5155 tree
5156 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5158 return va_list_type_node;
5161 /* The "standard" type of va_list is va_list_type_node. */
5163 tree
5164 std_canonical_va_list_type (tree type)
5166 tree wtype, htype;
5168 wtype = va_list_type_node;
5169 htype = type;
5171 if (TREE_CODE (wtype) == ARRAY_TYPE)
5173 /* If va_list is an array type, the argument may have decayed
5174 to a pointer type, e.g. by being passed to another function.
5175 In that case, unwrap both types so that we can compare the
5176 underlying records. */
5177 if (TREE_CODE (htype) == ARRAY_TYPE
5178 || POINTER_TYPE_P (htype))
5180 wtype = TREE_TYPE (wtype);
5181 htype = TREE_TYPE (htype);
5184 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5185 return va_list_type_node;
5187 return NULL_TREE;
5190 /* The "standard" implementation of va_start: just assign `nextarg' to
5191 the variable. */
5193 void
5194 std_expand_builtin_va_start (tree valist, rtx nextarg)
5196 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5197 convert_move (va_r, nextarg, 0);
5200 /* Expand EXP, a call to __builtin_va_start. */
5202 static rtx
5203 expand_builtin_va_start (tree exp)
5205 rtx nextarg;
5206 tree valist;
5207 location_t loc = EXPR_LOCATION (exp);
5209 if (call_expr_nargs (exp) < 2)
5211 error_at (loc, "too few arguments to function %<va_start%>");
5212 return const0_rtx;
5215 if (fold_builtin_next_arg (exp, true))
5216 return const0_rtx;
5218 nextarg = expand_builtin_next_arg ();
5219 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5221 if (targetm.expand_builtin_va_start)
5222 targetm.expand_builtin_va_start (valist, nextarg);
5223 else
5224 std_expand_builtin_va_start (valist, nextarg);
5226 return const0_rtx;
5229 /* Expand EXP, a call to __builtin_va_end. */
5231 static rtx
5232 expand_builtin_va_end (tree exp)
5234 tree valist = CALL_EXPR_ARG (exp, 0);
5236 /* Evaluate for side effects, if needed. I hate macros that don't
5237 do that. */
5238 if (TREE_SIDE_EFFECTS (valist))
5239 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5241 return const0_rtx;
5244 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5245 builtin rather than just as an assignment in stdarg.h because of the
5246 nastiness of array-type va_list types. */
5248 static rtx
5249 expand_builtin_va_copy (tree exp)
5251 tree dst, src, t;
5252 location_t loc = EXPR_LOCATION (exp);
5254 dst = CALL_EXPR_ARG (exp, 0);
5255 src = CALL_EXPR_ARG (exp, 1);
5257 dst = stabilize_va_list_loc (loc, dst, 1);
5258 src = stabilize_va_list_loc (loc, src, 0);
5260 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5262 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5264 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5265 TREE_SIDE_EFFECTS (t) = 1;
5266 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5268 else
5270 rtx dstb, srcb, size;
5272 /* Evaluate to pointers. */
5273 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5274 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5275 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5276 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5278 dstb = convert_memory_address (Pmode, dstb);
5279 srcb = convert_memory_address (Pmode, srcb);
5281 /* "Dereference" to BLKmode memories. */
5282 dstb = gen_rtx_MEM (BLKmode, dstb);
5283 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5284 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5285 srcb = gen_rtx_MEM (BLKmode, srcb);
5286 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5287 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5289 /* Copy. */
5290 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5293 return const0_rtx;
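/* Illustration (a user-level sketch, not part of this file; names are
   illustrative): on targets where va_list is an array type, plain
   assignment in stdarg.h would copy only a decayed pointer, which is
   why the array branch above emits a block copy of the whole record:

     void
     use_args_twice (int n, ...)
     {
       va_list ap, ap2;
       va_start (ap, n);
       va_copy (ap2, ap);
       ...
       va_end (ap2);
       va_end (ap);
     }  */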
5296 /* Expand a call to one of the builtin functions __builtin_frame_address or
5297 __builtin_return_address. */
5299 static rtx
5300 expand_builtin_frame_address (tree fndecl, tree exp)
5302 /* The argument must be a nonnegative integer constant.
5303 It counts the number of frames to scan up the stack.
5304 The value is either the frame pointer value or the return
5305 address saved in that frame. */
5306 if (call_expr_nargs (exp) == 0)
5307 /* Warning about missing arg was already issued. */
5308 return const0_rtx;
5309 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5311 error ("invalid argument to %qD", fndecl);
5312 return const0_rtx;
5314 else
5316 /* Number of frames to scan up the stack. */
5317 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5319 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5321 /* Some ports cannot access arbitrary stack frames. */
5322 if (tem == NULL)
5324 warning (0, "unsupported argument to %qD", fndecl);
5325 return const0_rtx;
5328 if (count)
5330 /* Warn since no effort is made to ensure that any frame
5331 beyond the current one exists or can be safely reached. */
5332 warning (OPT_Wframe_address, "calling %qD with "
5333 "a nonzero argument is unsafe", fndecl);
5336 /* For __builtin_frame_address, return what we've got. */
5337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5338 return tem;
5340 if (!REG_P (tem)
5341 && ! CONSTANT_P (tem))
5342 tem = copy_addr_to_reg (tem);
5343 return tem;
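/* Illustration (a user-level sketch, not part of this file): both
   builtins take a constant frame count:

     void *pc = __builtin_return_address (0);
     void *fp = __builtin_frame_address (0);
     void *up = __builtin_frame_address (1);

   The last line draws the -Wframe-address warning emitted above, since
   frames beyond the current one may not be safely reachable.  */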
5347 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5348 failed and the caller should emit a normal call. */
5350 static rtx
5351 expand_builtin_alloca (tree exp)
5353 rtx op0;
5354 rtx result;
5355 unsigned int align;
5356 tree fndecl = get_callee_fndecl (exp);
5357 HOST_WIDE_INT max_size;
5358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5359 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5360 bool valid_arglist
5361 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5362 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5363 VOID_TYPE)
5364 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5365 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5366 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5368 if (!valid_arglist)
5369 return NULL_RTX;
5371 if ((alloca_for_var
5372 && warn_vla_limit >= HOST_WIDE_INT_MAX
5373 && warn_alloc_size_limit < warn_vla_limit)
5374 || (!alloca_for_var
5375 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5376 && warn_alloc_size_limit < warn_alloca_limit
5379 /* -Walloca-larger-than and -Wvla-larger-than settings of
5380 less than HOST_WIDE_INT_MAX override the more general
5381 -Walloc-size-larger-than so unless either of the former
5382 options is smaller than the last one (which would imply
5383 that the call was already checked), check the alloca
5384 arguments for overflow. */
5385 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5386 int idx[] = { 0, -1 };
5387 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5390 /* Compute the argument. */
5391 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5393 /* Compute the alignment. */
5394 align = (fcode == BUILT_IN_ALLOCA
5395 ? BIGGEST_ALIGNMENT
5396 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5398 /* Compute the maximum size. */
5399 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5400 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5401 : -1);
5403 /* Allocate the desired space. If the allocation stems from the declaration
5404 of a variable-sized object, it cannot accumulate. */
5405 result
5406 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5407 result = convert_memory_address (ptr_mode, result);
5409 return result;
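/* Illustration (a user-level sketch, not part of this file; N is an
   assumed size variable): the _WITH_ALIGN form is what VLA declarations
   lower to; its second argument is the requested alignment in bits and
   must be constant:

     char *p = __builtin_alloca (n);
     char *q = __builtin_alloca_with_align (n, 128);

   The second call requests 128-bit (16-byte) aligned stack space.  */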
5412 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
5413 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5414 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment above
5415 handle_builtin_stack_restore. */
5417 static rtx
5418 expand_asan_emit_allocas_unpoison (tree exp)
5420 tree arg0 = CALL_EXPR_ARG (exp, 0);
5421 tree arg1 = CALL_EXPR_ARG (exp, 1);
5422 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5423 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5424 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5425 stack_pointer_rtx, NULL_RTX, 0,
5426 OPTAB_LIB_WIDEN);
5427 off = convert_modes (ptr_mode, Pmode, off, 0);
5428 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5429 OPTAB_LIB_WIDEN);
5430 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5431 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5432 top, ptr_mode, bot, ptr_mode);
5433 return ret;
5436 /* Expand a call to bswap builtin in EXP.
5437 Return NULL_RTX if a normal call should be emitted rather than expanding the
5438 function in-line. If convenient, the result should be placed in TARGET.
5439 SUBTARGET may be used as the target for computing one of EXP's operands. */
5441 static rtx
5442 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5443 rtx subtarget)
5445 tree arg;
5446 rtx op0;
5448 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5449 return NULL_RTX;
5451 arg = CALL_EXPR_ARG (exp, 0);
5452 op0 = expand_expr (arg,
5453 subtarget && GET_MODE (subtarget) == target_mode
5454 ? subtarget : NULL_RTX,
5455 target_mode, EXPAND_NORMAL);
5456 if (GET_MODE (op0) != target_mode)
5457 op0 = convert_to_mode (target_mode, op0, 1);
5459 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5461 gcc_assert (target);
5463 return convert_to_mode (target_mode, target, 1);
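/* Illustration (a user-level sketch, not part of this file): bswap
   reverses the byte order of its operand, so

     unsigned int y = __builtin_bswap32 (0x11223344u);

   leaves 0x44332211 in Y, which the expansion above obtains through
   bswap_optab.  */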
5466 /* Expand a call to a unary builtin in EXP.
5467 Return NULL_RTX if a normal call should be emitted rather than expanding the
5468 function in-line. If convenient, the result should be placed in TARGET.
5469 SUBTARGET may be used as the target for computing one of EXP's operands. */
5471 static rtx
5472 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5473 rtx subtarget, optab op_optab)
5475 rtx op0;
5477 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5478 return NULL_RTX;
5480 /* Compute the argument. */
5481 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5482 (subtarget
5483 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5484 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5485 VOIDmode, EXPAND_NORMAL);
5486 /* Compute op, into TARGET if possible.
5487 Set TARGET to wherever the result comes back. */
5488 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5489 op_optab, op0, target, op_optab != clrsb_optab);
5490 gcc_assert (target);
5492 return convert_to_mode (target_mode, target, 0);
5495 /* Expand a call to __builtin_expect. We just return our argument
5496 as the builtin_expect semantics should have already been applied
5497 by the tree branch prediction pass. */
5499 static rtx
5500 expand_builtin_expect (tree exp, rtx target)
5502 tree arg;
5504 if (call_expr_nargs (exp) < 2)
5505 return const0_rtx;
5506 arg = CALL_EXPR_ARG (exp, 0);
5508 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5509 /* When guessing was done, the hints should already be stripped away. */
5510 gcc_assert (!flag_guess_branch_prob
5511 || optimize == 0 || seen_error ());
5512 return target;
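/* Illustration (a user-level sketch, not part of this file): the hint
   has already been consumed by the branch prediction pass, so

     if (__builtin_expect (fd < 0, 0))
       handle_error ();

   reaches this point as a plain use of FD < 0, and the expansion
   simply returns the first argument.  */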
5515 /* Expand a call to __builtin_expect_with_probability. We just return our
5516 argument as the builtin_expect semantics should have already been applied
5517 by the tree branch prediction pass. */
5519 static rtx
5520 expand_builtin_expect_with_probability (tree exp, rtx target)
5522 tree arg;
5524 if (call_expr_nargs (exp) < 3)
5525 return const0_rtx;
5526 arg = CALL_EXPR_ARG (exp, 0);
5528 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5529 /* When guessing was done, the hints should already be stripped away. */
5530 gcc_assert (!flag_guess_branch_prob
5531 || optimize == 0 || seen_error ());
5532 return target;
5536 /* Expand a call to __builtin_assume_aligned. We just return our first
5537 argument as the builtin_assume_aligned semantics should have already
5538 been applied by CCP. */
5540 static rtx
5541 expand_builtin_assume_aligned (tree exp, rtx target)
5543 if (call_expr_nargs (exp) < 2)
5544 return const0_rtx;
5545 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5546 EXPAND_NORMAL);
5547 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5548 && (call_expr_nargs (exp) < 3
5549 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5550 return target;
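/* Illustration (a user-level sketch, not part of this file; BUF is an
   assumed pointer):

     double *p = __builtin_assume_aligned (buf, 32);

   promises the optimizer that BUF is 32-byte aligned. CCP has already
   exploited the hint, so the expansion above just forwards the pointer
   after asserting that the extra arguments have no side effects.  */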
5553 void
5554 expand_builtin_trap (void)
5556 if (targetm.have_trap ())
5558 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5559 /* For trap insns when not accumulating outgoing args force
5560 REG_ARGS_SIZE note to prevent crossjumping of calls with
5561 different args sizes. */
5562 if (!ACCUMULATE_OUTGOING_ARGS)
5563 add_args_size_note (insn, stack_pointer_delta);
5565 else
5567 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5568 tree call_expr = build_call_expr (fn, 0);
5569 expand_call (call_expr, NULL_RTX, false);
5572 emit_barrier ();
5575 /* Expand a call to __builtin_unreachable. We do nothing except emit
5576 a barrier saying that control flow will not pass here.
5578 It is the responsibility of the program being compiled to ensure
5579 that control flow never reaches __builtin_unreachable. */
5580 static void
5581 expand_builtin_unreachable (void)
5583 emit_barrier ();
5586 /* Expand EXP, a call to fabs, fabsf or fabsl.
5587 Return NULL_RTX if a normal call should be emitted rather than expanding
5588 the function inline. If convenient, the result should be placed
5589 in TARGET. SUBTARGET may be used as the target for computing
5590 the operand. */
5592 static rtx
5593 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5595 machine_mode mode;
5596 tree arg;
5597 rtx op0;
5599 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5600 return NULL_RTX;
5602 arg = CALL_EXPR_ARG (exp, 0);
5603 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5604 mode = TYPE_MODE (TREE_TYPE (arg));
5605 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5606 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5609 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5610 Return NULL if a normal call should be emitted rather than expanding the
5611 function inline. If convenient, the result should be placed in TARGET.
5612 SUBTARGET may be used as the target for computing the operand. */
5614 static rtx
5615 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5617 rtx op0, op1;
5618 tree arg;
5620 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5621 return NULL_RTX;
5623 arg = CALL_EXPR_ARG (exp, 0);
5624 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5626 arg = CALL_EXPR_ARG (exp, 1);
5627 op1 = expand_normal (arg);
5629 return expand_copysign (op0, op1, target);
5632 /* Expand a call to __builtin___clear_cache. */
5634 static rtx
5635 expand_builtin___clear_cache (tree exp)
5637 if (!targetm.code_for_clear_cache)
5639 #ifdef CLEAR_INSN_CACHE
5640 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5641 does something. Just do the default expansion to a call to
5642 __clear_cache(). */
5643 return NULL_RTX;
5644 #else
5645 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5646 does nothing. There is no need to call it. Do nothing. */
5647 return const0_rtx;
5648 #endif /* CLEAR_INSN_CACHE */
5651 /* We have a "clear_cache" insn, and it will handle everything. */
5652 tree begin, end;
5653 rtx begin_rtx, end_rtx;
5655 /* We must not expand to a library call. If we did, any
5656 fallback library function in libgcc that might contain a call to
5657 __builtin___clear_cache() would recurse infinitely. */
5658 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5660 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5661 return const0_rtx;
5664 if (targetm.have_clear_cache ())
5666 class expand_operand ops[2];
5668 begin = CALL_EXPR_ARG (exp, 0);
5669 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5671 end = CALL_EXPR_ARG (exp, 1);
5672 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5674 create_address_operand (&ops[0], begin_rtx);
5675 create_address_operand (&ops[1], end_rtx);
5676 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5677 return const0_rtx;
5679 return const0_rtx;
5682 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5684 static rtx
5685 round_trampoline_addr (rtx tramp)
5687 rtx temp, addend, mask;
5689 /* If we don't need too much alignment, we'll have been guaranteed
5690 proper alignment by get_trampoline_type. */
5691 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5692 return tramp;
5694 /* Round address up to desired boundary. */
5695 temp = gen_reg_rtx (Pmode);
5696 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5697 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5699 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5700 temp, 0, OPTAB_LIB_WIDEN);
5701 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5702 temp, 0, OPTAB_LIB_WIDEN);
5704 return tramp;
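/* Illustration (a minimal sketch of the rounding above): for a
   power-of-two alignment A, an address is rounded up with

     rounded = (addr + A - 1) & -A;

   e.g. ADDR 0x1003 with A = 16 gives (0x1012 & -16) = 0x1010.  */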
5707 static rtx
5708 expand_builtin_init_trampoline (tree exp, bool onstack)
5710 tree t_tramp, t_func, t_chain;
5711 rtx m_tramp, r_tramp, r_chain, tmp;
5713 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5714 POINTER_TYPE, VOID_TYPE))
5715 return NULL_RTX;
5717 t_tramp = CALL_EXPR_ARG (exp, 0);
5718 t_func = CALL_EXPR_ARG (exp, 1);
5719 t_chain = CALL_EXPR_ARG (exp, 2);
5721 r_tramp = expand_normal (t_tramp);
5722 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5723 MEM_NOTRAP_P (m_tramp) = 1;
5725 /* If ONSTACK, the TRAMP argument should be the address of a field
5726 within the local function's FRAME decl. Either way, let's see if
5727 we can fill in the MEM_ATTRs for this memory. */
5728 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5729 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5731 /* Creator of a heap trampoline is responsible for making sure the
5732 address is aligned to at least STACK_BOUNDARY. Normally malloc
5733 will ensure this anyhow. */
5734 tmp = round_trampoline_addr (r_tramp);
5735 if (tmp != r_tramp)
5737 m_tramp = change_address (m_tramp, BLKmode, tmp);
5738 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5739 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5742 /* The FUNC argument should be the address of the nested function.
5743 Extract the actual function decl to pass to the hook. */
5744 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5745 t_func = TREE_OPERAND (t_func, 0);
5746 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5748 r_chain = expand_normal (t_chain);
5750 /* Generate insns to initialize the trampoline. */
5751 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5753 if (onstack)
5755 trampolines_created = 1;
5757 if (targetm.calls.custom_function_descriptors != 0)
5758 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5759 "trampoline generated for nested function %qD", t_func);
5762 return const0_rtx;
5765 static rtx
5766 expand_builtin_adjust_trampoline (tree exp)
5768 rtx tramp;
5770 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5771 return NULL_RTX;
5773 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5774 tramp = round_trampoline_addr (tramp);
5775 if (targetm.calls.trampoline_adjust_address)
5776 tramp = targetm.calls.trampoline_adjust_address (tramp);
5778 return tramp;
5781 /* Expand a call to the builtin descriptor initialization routine.
5782 A descriptor is made up of a couple of pointers to the static
5783 chain and the code entry in this order. */
5785 static rtx
5786 expand_builtin_init_descriptor (tree exp)
5788 tree t_descr, t_func, t_chain;
5789 rtx m_descr, r_descr, r_func, r_chain;
5791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5792 VOID_TYPE))
5793 return NULL_RTX;
5795 t_descr = CALL_EXPR_ARG (exp, 0);
5796 t_func = CALL_EXPR_ARG (exp, 1);
5797 t_chain = CALL_EXPR_ARG (exp, 2);
5799 r_descr = expand_normal (t_descr);
5800 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5801 MEM_NOTRAP_P (m_descr) = 1;
5802 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5804 r_func = expand_normal (t_func);
5805 r_chain = expand_normal (t_chain);
5807 /* Generate insns to initialize the descriptor. */
5808 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5809 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5810 POINTER_SIZE / BITS_PER_UNIT), r_func);
5812 return const0_rtx;
5815 /* Expand a call to the builtin descriptor adjustment routine. */
5817 static rtx
5818 expand_builtin_adjust_descriptor (tree exp)
5820 rtx tramp;
5822 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5823 return NULL_RTX;
5825 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5827 /* Unalign the descriptor to allow runtime identification. */
5828 tramp = plus_constant (ptr_mode, tramp,
5829 targetm.calls.custom_function_descriptors);
5831 return force_operand (tramp, NULL_RTX);
5834 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5835 function. The function first checks whether the back end provides
5836 an insn to implement signbit for the respective mode. If not, it
5837 checks whether the floating point format of the value is such that
5838 the sign bit can be extracted. If that is not the case, error out.
5839 EXP is the expression that is a call to the builtin function; if
5840 convenient, the result should be placed in TARGET. */
5841 static rtx
5842 expand_builtin_signbit (tree exp, rtx target)
5844 const struct real_format *fmt;
5845 scalar_float_mode fmode;
5846 scalar_int_mode rmode, imode;
5847 tree arg;
5848 int word, bitpos;
5849 enum insn_code icode;
5850 rtx temp;
5851 location_t loc = EXPR_LOCATION (exp);
5853 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5854 return NULL_RTX;
5856 arg = CALL_EXPR_ARG (exp, 0);
5857 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5858 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5859 fmt = REAL_MODE_FORMAT (fmode);
5861 arg = builtin_save_expr (arg);
5863 /* Expand the argument yielding a RTX expression. */
5864 temp = expand_normal (arg);
5866 /* Check if the back end provides an insn that handles signbit for the
5867 argument's mode. */
5868 icode = optab_handler (signbit_optab, fmode);
5869 if (icode != CODE_FOR_nothing)
5871 rtx_insn *last = get_last_insn ();
5872 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5873 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5874 return target;
5875 delete_insns_since (last);
5878 /* For floating point formats without a sign bit, implement signbit
5879 as "ARG < 0.0". */
5880 bitpos = fmt->signbit_ro;
5881 if (bitpos < 0)
5883 /* But we can't do this if the format supports signed zero. */
5884 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5886 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5887 build_real (TREE_TYPE (arg), dconst0));
5888 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5891 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5893 imode = int_mode_for_mode (fmode).require ();
5894 temp = gen_lowpart (imode, temp);
5896 else
5898 imode = word_mode;
5899 /* Handle targets with different FP word orders. */
5900 if (FLOAT_WORDS_BIG_ENDIAN)
5901 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5902 else
5903 word = bitpos / BITS_PER_WORD;
5904 temp = operand_subword_force (temp, word, fmode);
5905 bitpos = bitpos % BITS_PER_WORD;
5908 /* Force the intermediate word_mode (or narrower) result into a
5909 register. This avoids attempting to create paradoxical SUBREGs
5910 of floating point modes below. */
5911 temp = force_reg (imode, temp);
5913 /* If the bitpos is within the "result mode" lowpart, the operation
5914 can be implemented with a single bitwise AND. Otherwise, we need
5915 a right shift and an AND. */
5917 if (bitpos < GET_MODE_BITSIZE (rmode))
5919 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5921 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5922 temp = gen_lowpart (rmode, temp);
5923 temp = expand_binop (rmode, and_optab, temp,
5924 immed_wide_int_const (mask, rmode),
5925 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5927 else
5929 /* Perform a logical right shift to place the signbit in the least
5930 significant bit, then truncate the result to the desired mode
5931 and mask just this bit. */
5932 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5933 temp = gen_lowpart (rmode, temp);
5934 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5935 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5938 return temp;
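/* Illustration (a user-level sketch, not part of this file): for IEEE
   binary64 the sign occupies the topmost of 64 bits, so the
   shift-and-mask path above behaves roughly like

     int
     sign_of (double x)
     {
       unsigned long long u;
       __builtin_memcpy (&u, &x, sizeof u);
       return (int) (u >> 63) & 1;
     }

   which, unlike a plain X < 0.0 test, distinguishes -0.0 from 0.0.  */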
5941 /* Expand fork or exec calls. TARGET is the desired target of the
5942 call. EXP is the call. FN is the
5943 identifier of the actual function. IGNORE is nonzero if the
5944 value is to be ignored. */
5946 static rtx
5947 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5949 tree id, decl;
5950 tree call;
5952 /* If we are not profiling, just call the function. */
5953 if (!profile_arc_flag)
5954 return NULL_RTX;
5956 /* Otherwise call the wrapper. This should be equivalent for the rest of
5957 compiler, so the code does not diverge, and the wrapper may run the
5958 code necessary for keeping the profiling sane. */
5960 switch (DECL_FUNCTION_CODE (fn))
5962 case BUILT_IN_FORK:
5963 id = get_identifier ("__gcov_fork");
5964 break;
5966 case BUILT_IN_EXECL:
5967 id = get_identifier ("__gcov_execl");
5968 break;
5970 case BUILT_IN_EXECV:
5971 id = get_identifier ("__gcov_execv");
5972 break;
5974 case BUILT_IN_EXECLP:
5975 id = get_identifier ("__gcov_execlp");
5976 break;
5978 case BUILT_IN_EXECLE:
5979 id = get_identifier ("__gcov_execle");
5980 break;
5982 case BUILT_IN_EXECVP:
5983 id = get_identifier ("__gcov_execvp");
5984 break;
5986 case BUILT_IN_EXECVE:
5987 id = get_identifier ("__gcov_execve");
5988 break;
5990 default:
5991 gcc_unreachable ();
5994 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5995 FUNCTION_DECL, id, TREE_TYPE (fn));
5996 DECL_EXTERNAL (decl) = 1;
5997 TREE_PUBLIC (decl) = 1;
5998 DECL_ARTIFICIAL (decl) = 1;
5999 TREE_NOTHROW (decl) = 1;
6000 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6001 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6002 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6003 return expand_call (call, target, ignore);
6008 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6009 the pointer in these functions is void*, the tree optimizers may remove
6010 casts. The mode computed in expand_builtin isn't reliable either, due
6011 to __sync_bool_compare_and_swap.
6013 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6014 group of builtins. This gives us log2 of the mode size. */
6016 static inline machine_mode
6017 get_builtin_sync_mode (int fcode_diff)
6019 /* The size is not negotiable, so ask not to get BLKmode in return
6020 if the target indicates that a smaller size would be better. */
6021 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6024 /* Expand the memory expression LOC and return the appropriate memory operand
6025 for the builtin_sync operations. */
6027 static rtx
6028 get_builtin_sync_mem (tree loc, machine_mode mode)
6030 rtx addr, mem;
6031 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6032 ? TREE_TYPE (TREE_TYPE (loc))
6033 : TREE_TYPE (loc));
6034 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6036 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6037 addr = convert_memory_address (addr_mode, addr);
6039 /* Note that we explicitly do not want any alias information for this
6040 memory, so that we kill all other live memories. Otherwise we don't
6041 satisfy the full barrier semantics of the intrinsic. */
6042 mem = gen_rtx_MEM (mode, addr);
6044 set_mem_addr_space (mem, addr_space);
6046 mem = validize_mem (mem);
6048 /* The alignment needs to be at least according to that of the mode. */
6049 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6050 get_pointer_alignment (loc)));
6051 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6052 MEM_VOLATILE_P (mem) = 1;
6054 return mem;
6057 /* Make sure an argument is in the right mode.
6058 EXP is the tree argument.
6059 MODE is the mode it should be in. */
6061 static rtx
6062 expand_expr_force_mode (tree exp, machine_mode mode)
6064 rtx val;
6065 machine_mode old_mode;
6067 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6068 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6069 of CONST_INTs, where we know the old_mode only from the call argument. */
6071 old_mode = GET_MODE (val);
6072 if (old_mode == VOIDmode)
6073 old_mode = TYPE_MODE (TREE_TYPE (exp));
6074 val = convert_modes (mode, old_mode, val, 1);
6075 return val;
6079 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6080 EXP is the CALL_EXPR. CODE is the rtx code
6081 that corresponds to the arithmetic or logical operation from the name;
6082 an exception here is that NOT actually means NAND. TARGET is an optional
6083 place for us to store the results; AFTER is true if this is the
6084 fetch_and_xxx form. */
6086 static rtx
6087 expand_builtin_sync_operation (machine_mode mode, tree exp,
6088 enum rtx_code code, bool after,
6089 rtx target)
6091 rtx val, mem;
6092 location_t loc = EXPR_LOCATION (exp);
6094 if (code == NOT && warn_sync_nand)
6096 tree fndecl = get_callee_fndecl (exp);
6097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6099 static bool warned_f_a_n, warned_n_a_f;
6101 switch (fcode)
6103 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6104 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6105 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6106 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6107 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6108 if (warned_f_a_n)
6109 break;
6111 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6112 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6113 warned_f_a_n = true;
6114 break;
6116 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6117 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6118 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6119 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6120 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6121 if (warned_n_a_f)
6122 break;
6124 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6125 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6126 warned_n_a_f = true;
6127 break;
6129 default:
6130 gcc_unreachable ();
6134 /* Expand the operands. */
6135 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6136 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6138 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6139 after);
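/* Illustration (a user-level sketch, not part of this file; COUNTER is
   an assumed shared variable): the fetch_and_OP forms return the old
   value and the OP_and_fetch forms the new one, e.g.

     int old = __sync_fetch_and_add (&counter, 1);
     int cur = __sync_add_and_fetch (&counter, 1);

   and, per the warning above, the NAND variants compute ~(old & val)
   since GCC 4.4.  */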
6142 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6143 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6144 true if this is the boolean form. TARGET is a place for us to store the
6145 results; this is NOT optional if IS_BOOL is true. */
6147 static rtx
6148 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6149 bool is_bool, rtx target)
6151 rtx old_val, new_val, mem;
6152 rtx *pbool, *poval;
6154 /* Expand the operands. */
6155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6156 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6157 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6159 pbool = poval = NULL;
6160 if (target != const0_rtx)
6162 if (is_bool)
6163 pbool = &target;
6164 else
6165 poval = &target;
6167 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6168 false, MEMMODEL_SYNC_SEQ_CST,
6169 MEMMODEL_SYNC_SEQ_CST))
6170 return NULL_RTX;
6172 return target;
6175 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6176 general form is actually an atomic exchange, and some targets only
6177 support a reduced form with the second argument being a constant 1.
6178 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6179 the results. */
6181 static rtx
6182 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6183 rtx target)
6185 rtx val, mem;
6187 /* Expand the operands. */
6188 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6189 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6191 return expand_sync_lock_test_and_set (target, mem, val);
6194 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6196 static void
6197 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6199 rtx mem;
6201 /* Expand the operands. */
6202 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6204 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6207 /* Given an integer representing an ``enum memmodel'', verify its
6208 correctness and return the memory model enum. */
6210 static enum memmodel
6211 get_memmodel (tree exp)
6213 rtx op;
6214 unsigned HOST_WIDE_INT val;
6215 location_t loc
6216 = expansion_point_location_if_in_system_header (input_location);
6218 /* If the parameter is not a constant, it's a run time value so we'll just
6219 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6220 if (TREE_CODE (exp) != INTEGER_CST)
6221 return MEMMODEL_SEQ_CST;
6223 op = expand_normal (exp);
6225 val = INTVAL (op);
6226 if (targetm.memmodel_check)
6227 val = targetm.memmodel_check (val);
6228 else if (val & ~MEMMODEL_MASK)
6230 warning_at (loc, OPT_Winvalid_memory_model,
6231 "unknown architecture specifier in memory model to builtin");
6232 return MEMMODEL_SEQ_CST;
5235 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
6236 if (memmodel_base (val) >= MEMMODEL_LAST)
6238 warning_at (loc, OPT_Winvalid_memory_model,
6239 "invalid memory model argument to builtin");
6240 return MEMMODEL_SEQ_CST;
6243 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6244 be conservative and promote consume to acquire. */
6245 if (val == MEMMODEL_CONSUME)
6246 val = MEMMODEL_ACQUIRE;
6248 return (enum memmodel) val;
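/* Illustration (a user-level sketch, not part of this file;
   RUNTIME_MODEL stands for any non-constant expression): the model
   argument is normally one of the __ATOMIC_* constants, and anything
   non-constant is conservatively treated as seq_cst above:

     __atomic_fetch_add (&x, 1, __ATOMIC_RELAXED);
     __atomic_fetch_add (&x, 1, runtime_model);

   The second call is expanded as if __ATOMIC_SEQ_CST had been given.  */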
6251 /* Expand the __atomic_exchange intrinsic:
6252 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6253 EXP is the CALL_EXPR.
6254 TARGET is an optional place for us to store the results. */
6256 static rtx
6257 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6259 rtx val, mem;
6260 enum memmodel model;
6262 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6264 if (!flag_inline_atomics)
6265 return NULL_RTX;
6267 /* Expand the operands. */
6268 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6269 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6271 return expand_atomic_exchange (target, mem, val, model);
6274 /* Expand the __atomic_compare_exchange intrinsic:
6275 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6276 TYPE desired, BOOL weak,
6277 enum memmodel success,
6278 enum memmodel failure)
6279 EXP is the CALL_EXPR.
6280 TARGET is an optional place for us to store the results. */
6282 static rtx
6283 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6284 rtx target)
6286 rtx expect, desired, mem, oldval;
6287 rtx_code_label *label;
6288 enum memmodel success, failure;
6289 tree weak;
6290 bool is_weak;
6291 location_t loc
6292 = expansion_point_location_if_in_system_header (input_location);
6294 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6295 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6297 if (failure > success)
6299 warning_at (loc, OPT_Winvalid_memory_model,
6300 "failure memory model cannot be stronger than success "
6301 "memory model for %<__atomic_compare_exchange%>");
6302 success = MEMMODEL_SEQ_CST;
6305 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6307 warning_at (loc, OPT_Winvalid_memory_model,
6308 "invalid failure memory model for "
6309 "%<__atomic_compare_exchange%>");
6310 failure = MEMMODEL_SEQ_CST;
6311 success = MEMMODEL_SEQ_CST;
6315 if (!flag_inline_atomics)
6316 return NULL_RTX;
6318 /* Expand the operands. */
6319 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6321 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6322 expect = convert_memory_address (Pmode, expect);
6323 expect = gen_rtx_MEM (mode, expect);
6324 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6326 weak = CALL_EXPR_ARG (exp, 3);
6327 is_weak = false;
6328 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6329 is_weak = true;
6331 if (target == const0_rtx)
6332 target = NULL;
6334 /* Lest the rtl backend create a race condition with an improper store
6335 to memory, always create a new pseudo for OLDVAL. */
6336 oldval = NULL;
6338 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6339 is_weak, success, failure))
6340 return NULL_RTX;
6342 /* Conditionally store back to EXPECT, lest we create a race condition
6343 with an improper store to memory. */
6344 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6345 the normal case where EXPECT is totally private, i.e. a register. At
6346 which point the store can be unconditional. */
6347 label = gen_label_rtx ();
6348 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6349 GET_MODE (target), 1, label);
6350 emit_move_insn (expect, oldval);
6351 emit_label (label);
6353 return target;
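/* Illustration (a user-level sketch, not part of this file; CURRENT is
   an assumed shared variable): the conditional store back to EXPECT
   above is what makes the canonical CAS loop work:

     int expected = current;
     while (!__atomic_compare_exchange_n (&current, &expected,
                                          expected + 1, false,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;

   On failure the observed value of CURRENT is written into EXPECTED,
   so each retry starts from fresh data.  */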
6356 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6357 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6358 call. The weak parameter must be dropped to match the expected parameter
6359 list and the expected argument changed from a value to a pointer to a
6360 memory slot. */
6362 static void
6363 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6365 unsigned int z;
6366 vec<tree, va_gc> *vec;
6368 vec_alloc (vec, 5);
6369 vec->quick_push (gimple_call_arg (call, 0));
6370 tree expected = gimple_call_arg (call, 1);
6371 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6372 TREE_TYPE (expected));
6373 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6374 if (expd != x)
6375 emit_move_insn (x, expd);
6376 tree v = make_tree (TREE_TYPE (expected), x);
6377 vec->quick_push (build1 (ADDR_EXPR,
6378 build_pointer_type (TREE_TYPE (expected)), v));
6379 vec->quick_push (gimple_call_arg (call, 2));
6380 /* Skip the boolean weak parameter. */
6381 for (z = 4; z < 6; z++)
6382 vec->quick_push (gimple_call_arg (call, z));
6383 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6384 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6385 gcc_assert (bytes_log2 < 5);
6386 built_in_function fncode
6387 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6388 + bytes_log2);
6389 tree fndecl = builtin_decl_explicit (fncode);
6390 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6391 fndecl);
6392 tree exp = build_call_vec (boolean_type_node, fn, vec);
6393 tree lhs = gimple_call_lhs (call);
6394 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6395 if (lhs)
6397 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6398 if (GET_MODE (boolret) != mode)
6399 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6400 x = force_reg (mode, x);
6401 write_complex_part (target, boolret, true);
6402 write_complex_part (target, x, false);
6406 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6408 void
6409 expand_ifn_atomic_compare_exchange (gcall *call)
6411 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6412 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6413 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6414 rtx expect, desired, mem, oldval, boolret;
6415 enum memmodel success, failure;
6416 tree lhs;
6417 bool is_weak;
6418 location_t loc
6419 = expansion_point_location_if_in_system_header (gimple_location (call));
6421 success = get_memmodel (gimple_call_arg (call, 4));
6422 failure = get_memmodel (gimple_call_arg (call, 5));
6424 if (failure > success)
6426 warning_at (loc, OPT_Winvalid_memory_model,
6427 "failure memory model cannot be stronger than success "
6428 "memory model for %<__atomic_compare_exchange%>");
6429 success = MEMMODEL_SEQ_CST;
6432 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6434 warning_at (loc, OPT_Winvalid_memory_model,
6435 "invalid failure memory model for "
6436 "%<__atomic_compare_exchange%>");
6437 failure = MEMMODEL_SEQ_CST;
6438 success = MEMMODEL_SEQ_CST;
6441 if (!flag_inline_atomics)
6443 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6444 return;
6447 /* Expand the operands. */
6448 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6450 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6451 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6453 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6455 boolret = NULL;
6456 oldval = NULL;
6458 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6459 is_weak, success, failure))
6461 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6462 return;
6465 lhs = gimple_call_lhs (call);
6466 if (lhs)
6468 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6469 if (GET_MODE (boolret) != mode)
6470 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6471 write_complex_part (target, boolret, true);
6472 write_complex_part (target, oldval, false);
6476 /* Expand the __atomic_load intrinsic:
6477 TYPE __atomic_load (TYPE *object, enum memmodel)
6478 EXP is the CALL_EXPR.
6479 TARGET is an optional place for us to store the results. */
6481 static rtx
6482 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6484 rtx mem;
6485 enum memmodel model;
6487 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6488 if (is_mm_release (model) || is_mm_acq_rel (model))
6490 location_t loc
6491 = expansion_point_location_if_in_system_header (input_location);
6492 warning_at (loc, OPT_Winvalid_memory_model,
6493 "invalid memory model for %<__atomic_load%>");
6494 model = MEMMODEL_SEQ_CST;
6497 if (!flag_inline_atomics)
6498 return NULL_RTX;
6500 /* Expand the operand. */
6501 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6503 return expand_atomic_load (target, mem, model);
6507 /* Expand the __atomic_store intrinsic:
6508 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6509 EXP is the CALL_EXPR. */
6512 static rtx
6513 expand_builtin_atomic_store (machine_mode mode, tree exp)
6515 rtx mem, val;
6516 enum memmodel model;
6518 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6519 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6520 || is_mm_release (model)))
6522 location_t loc
6523 = expansion_point_location_if_in_system_header (input_location);
6524 warning_at (loc, OPT_Winvalid_memory_model,
6525 "invalid memory model for %<__atomic_store%>");
6526 model = MEMMODEL_SEQ_CST;
6529 if (!flag_inline_atomics)
6530 return NULL_RTX;
6532 /* Expand the operands. */
6533 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6534 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6536 return expand_atomic_store (mem, val, model, false);
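/* Illustration (a user-level sketch, not part of this file; FLAG is an
   assumed shared variable): atomic stores accept only the relaxed,
   release and seq_cst models:

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);
     __atomic_store_n (&flag, 1, __ATOMIC_ACQUIRE);

   The second call draws the -Winvalid-memory-model warning above and
   is expanded with seq_cst instead.  */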
6539 /* Expand the __atomic_fetch_XXX intrinsic:
6540 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6541 EXP is the CALL_EXPR.
6542 TARGET is an optional place for us to store the results.
6543 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6544 FETCH_AFTER is true if returning the result of the operation.
6545 FETCH_AFTER is false if returning the value before the operation.
6546 IGNORE is true if the result is not used.
6547 EXT_CALL is the correct builtin for an external call if this cannot be
6548 resolved to an instruction sequence. */
6550 static rtx
6551 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6552 enum rtx_code code, bool fetch_after,
6553 bool ignore, enum built_in_function ext_call)
6555 rtx val, mem, ret;
6556 enum memmodel model;
6557 tree fndecl;
6558 tree addr;
6560 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6562 /* Expand the operands. */
6563 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6564 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6566 /* Only try generating instructions if inlining is turned on. */
6567 if (flag_inline_atomics)
6569 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6570 if (ret)
6571 return ret;
6574 /* Return if a different routine isn't needed for the library call. */
6575 if (ext_call == BUILT_IN_NONE)
6576 return NULL_RTX;
6578 /* Change the call to the specified function. */
6579 fndecl = get_callee_fndecl (exp);
6580 addr = CALL_EXPR_FN (exp);
6581 STRIP_NOPS (addr);
6583 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6584 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6586 /* If we will emit code after the call, the call cannot be a tail call.
6587 If it is emitted as a tail call, a barrier is emitted after it, and
6588 then all trailing code is removed. */
6589 if (!ignore)
6590 CALL_EXPR_TAILCALL (exp) = 0;
6592 /* Expand the call here so we can emit trailing code. */
6593 ret = expand_call (exp, target, ignore);
6595 /* Replace the original function just in case it matters. */
6596 TREE_OPERAND (addr, 0) = fndecl;
6598 /* Then issue the arithmetic correction to return the right result. */
6599 if (!ignore)
6601 if (code == NOT)
6603 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6604 OPTAB_LIB_WIDEN);
6605 ret = expand_simple_unop (mode, NOT, ret, target, true);
6607 else
6608 ret = expand_simple_binop (mode, code, ret, val, target, true,
6609 OPTAB_LIB_WIDEN);
6611 return ret;
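/* Illustration (a minimal sketch of the correction above): when only
   the fetch-before library call is available but the OP_and_fetch
   result was requested, the new value is recomputed from the returned
   old value, e.g.

     new = old + val;

   for add, and, matching the AND/NOT pair above,

     new = ~(old & val);

   for nand.  */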
6614 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6616 void
6617 expand_ifn_atomic_bit_test_and (gcall *call)
6619 tree ptr = gimple_call_arg (call, 0);
6620 tree bit = gimple_call_arg (call, 1);
6621 tree flag = gimple_call_arg (call, 2);
6622 tree lhs = gimple_call_lhs (call);
6623 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6624 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6625 enum rtx_code code;
6626 optab optab;
6627 class expand_operand ops[5];
6629 gcc_assert (flag_inline_atomics);
6631 if (gimple_call_num_args (call) == 4)
6632 model = get_memmodel (gimple_call_arg (call, 3));
6634 rtx mem = get_builtin_sync_mem (ptr, mode);
6635 rtx val = expand_expr_force_mode (bit, mode);
6637 switch (gimple_call_internal_fn (call))
6639 case IFN_ATOMIC_BIT_TEST_AND_SET:
6640 code = IOR;
6641 optab = atomic_bit_test_and_set_optab;
6642 break;
6643 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6644 code = XOR;
6645 optab = atomic_bit_test_and_complement_optab;
6646 break;
6647 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6648 code = AND;
6649 optab = atomic_bit_test_and_reset_optab;
6650 break;
6651 default:
6652 gcc_unreachable ();
6655 if (lhs == NULL_TREE)
6657 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6658 val, NULL_RTX, true, OPTAB_DIRECT);
6659 if (code == AND)
6660 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6661 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6662 return;
6665 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6666 enum insn_code icode = direct_optab_handler (optab, mode);
6667 gcc_assert (icode != CODE_FOR_nothing);
6668 create_output_operand (&ops[0], target, mode);
6669 create_fixed_operand (&ops[1], mem);
6670 create_convert_operand_to (&ops[2], val, mode, true);
6671 create_integer_operand (&ops[3], model);
6672 create_integer_operand (&ops[4], integer_onep (flag));
6673 if (maybe_expand_insn (icode, 5, ops))
6674 return;
6676 rtx bitval = val;
6677 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6678 val, NULL_RTX, true, OPTAB_DIRECT);
6679 rtx maskval = val;
6680 if (code == AND)
6681 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6682 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6683 code, model, false);
6684 if (integer_onep (flag))
6686 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6687 NULL_RTX, true, OPTAB_DIRECT);
6688 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6689 true, OPTAB_DIRECT);
6691 else
6692 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6693 OPTAB_DIRECT);
6694 if (result != target)
6695 emit_move_insn (target, result);
6698 /* Expand an atomic clear operation.
6699 void __atomic_clear (BOOL *obj, enum memmodel)
6700 EXP is the call expression. */
6702 static rtx
6703 expand_builtin_atomic_clear (tree exp)
6705 machine_mode mode;
6706 rtx mem, ret;
6707 enum memmodel model;
6709 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6710 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6711 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6713 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6715 location_t loc
6716 = expansion_point_location_if_in_system_header (input_location);
6717 warning_at (loc, OPT_Winvalid_memory_model,
6718 "invalid memory model for %<__atomic_store%>");
6719 model = MEMMODEL_SEQ_CST;
6722 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6723 Failing that, issue a plain store. The only way the atomic forms can
6724 fail is if the bool type is larger than a word size. Unlikely, but
6725 handle it anyway for completeness. Assume a single threaded model since
6726 there is no atomic support in this case, and no barriers are required. */
6727 ret = expand_atomic_store (mem, const0_rtx, model, true);
6728 if (!ret)
6729 emit_move_insn (mem, const0_rtx);
6730 return const0_rtx;
6733 /* Expand an atomic test_and_set operation.
6734 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6735 EXP is the call expression. */
6737 static rtx
6738 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6740 rtx mem;
6741 enum memmodel model;
6742 machine_mode mode;
6744 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6745 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6746 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6748 return expand_atomic_test_and_set (target, mem, model);
6752 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6753 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6755 static tree
6756 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6758 int size;
6759 machine_mode mode;
6760 unsigned int mode_align, type_align;
6762 if (TREE_CODE (arg0) != INTEGER_CST)
6763 return NULL_TREE;
6765 /* We need a corresponding integer mode for the access to be lock-free. */
6766 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6767 if (!int_mode_for_size (size, 0).exists (&mode))
6768 return boolean_false_node;
6770 mode_align = GET_MODE_ALIGNMENT (mode);
6772 if (TREE_CODE (arg1) == INTEGER_CST)
6774 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6776 /* Either this argument is null, or it's a fake pointer encoding
6777 the alignment of the object. */
6778 val = least_bit_hwi (val);
6779 val *= BITS_PER_UNIT;
6781 if (val == 0 || mode_align < val)
6782 type_align = mode_align;
6783 else
6784 type_align = val;
6786 else
6788 tree ttype = TREE_TYPE (arg1);
6790 /* This function is usually invoked and folded immediately by the front
6791 end before anything else has a chance to look at it. The pointer
6792 parameter at this point is usually cast to a void *, so check for that
6793 and look past the cast. */
6794 if (CONVERT_EXPR_P (arg1)
6795 && POINTER_TYPE_P (ttype)
6796 && VOID_TYPE_P (TREE_TYPE (ttype))
6797 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6798 arg1 = TREE_OPERAND (arg1, 0);
6800 ttype = TREE_TYPE (arg1);
6801 gcc_assert (POINTER_TYPE_P (ttype));
6803 /* Get the underlying type of the object. */
6804 ttype = TREE_TYPE (ttype);
6805 type_align = TYPE_ALIGN (ttype);
6808 /* If the object has smaller alignment, the lock free routines cannot
6809 be used. */
6810 if (type_align < mode_align)
6811 return boolean_false_node;
6813 /* Check if a compare_and_swap pattern exists for the mode which represents
6814 the required size. The pattern is not allowed to fail, so the existence
6815 of the pattern indicates support is present. Also require that an
6816 atomic load exists for the required size. */
6817 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6818 return boolean_true_node;
6819 else
6820 return boolean_false_node;
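/* Illustration (a user-level sketch, not part of this file): the second
   argument is either null or a fake pointer encoding the object's
   alignment via its least significant set bit:

     __atomic_always_lock_free (sizeof (long), 0);
     __atomic_always_lock_free (4, (void *) 8);

   The first call folds using the typical alignment of a long-sized
   object; the second asks about a 4-byte object known to be 8-byte
   aligned.  */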
6823 /* Return true if the parameters to call EXP represent an object which will
6824 always generate lock free instructions. The first argument represents the
6825 size of the object, and the second parameter is a pointer to the object
6826 itself. If NULL is passed for the object, then the result is based on
6827 typical alignment for an object of the specified size. Otherwise return
6828 false. */
6830 static rtx
6831 expand_builtin_atomic_always_lock_free (tree exp)
6833 tree size;
6834 tree arg0 = CALL_EXPR_ARG (exp, 0);
6835 tree arg1 = CALL_EXPR_ARG (exp, 1);
6837 if (TREE_CODE (arg0) != INTEGER_CST)
6839 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6840 return const0_rtx;
6843 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6844 if (size == boolean_true_node)
6845 return const1_rtx;
6846 return const0_rtx;
6849 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6850 is lock free on this architecture. */
6852 static tree
6853 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6855 if (!flag_inline_atomics)
6856 return NULL_TREE;
6858 /* If it isn't always lock free, don't generate a result. */
6859 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6860 return boolean_true_node;
6862 return NULL_TREE;
6865 /* Return true if the parameters to call EXP represent an object which will
6866 always generate lock free instructions. The first argument represents the
6867 size of the object, and the second parameter is a pointer to the object
6868 itself. If NULL is passed for the object, then the result is based on
6869 typical alignment for an object of the specified size. Otherwise return
6870 NULL. */
6872 static rtx
6873 expand_builtin_atomic_is_lock_free (tree exp)
6875 tree size;
6876 tree arg0 = CALL_EXPR_ARG (exp, 0);
6877 tree arg1 = CALL_EXPR_ARG (exp, 1);
6879 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6881 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6882 return NULL_RTX;
6885 if (!flag_inline_atomics)
6886 return NULL_RTX;
6888 /* If the value is known at compile time, return the RTX for it. */
6889 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6890 if (size == boolean_true_node)
6891 return const1_rtx;
6893 return NULL_RTX;
6896 /* Expand the __atomic_thread_fence intrinsic:
6897 void __atomic_thread_fence (enum memmodel)
6898 EXP is the CALL_EXPR. */
6900 static void
6901 expand_builtin_atomic_thread_fence (tree exp)
6903 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6904 expand_mem_thread_fence (model);
6907 /* Expand the __atomic_signal_fence intrinsic:
6908 void __atomic_signal_fence (enum memmodel)
6909 EXP is the CALL_EXPR. */
6911 static void
6912 expand_builtin_atomic_signal_fence (tree exp)
6914 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6915 expand_mem_signal_fence (model);
6918 /* Expand the __sync_synchronize intrinsic. */
6920 static void
6921 expand_builtin_sync_synchronize (void)
6923 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6926 static rtx
6927 expand_builtin_thread_pointer (tree exp, rtx target)
6929 enum insn_code icode;
6930 if (!validate_arglist (exp, VOID_TYPE))
6931 return const0_rtx;
6932 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6933 if (icode != CODE_FOR_nothing)
6935 class expand_operand op;
6936 /* If the target is not suitable then create a new target. */
6937 if (target == NULL_RTX
6938 || !REG_P (target)
6939 || GET_MODE (target) != Pmode)
6940 target = gen_reg_rtx (Pmode);
6941 create_output_operand (&op, target, Pmode);
6942 expand_insn (icode, 1, &op);
6943 return target;
6945 error ("%<__builtin_thread_pointer%> is not supported on this target");
6946 return const0_rtx;
6949 static void
6950 expand_builtin_set_thread_pointer (tree exp)
6952 enum insn_code icode;
6953 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6954 return;
6955 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6956 if (icode != CODE_FOR_nothing)
6958 class expand_operand op;
6959 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6960 Pmode, EXPAND_NORMAL);
6961 create_input_operand (&op, val, Pmode);
6962 expand_insn (icode, 1, &op);
6963 return;
6965 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6969 /* Emit code to restore the current value of stack. */
6971 static void
6972 expand_stack_restore (tree var)
6974 rtx_insn *prev;
6975 rtx sa = expand_normal (var);
6977 sa = convert_memory_address (Pmode, sa);
6979 prev = get_last_insn ();
6980 emit_stack_restore (SAVE_BLOCK, sa);
6982 record_new_stack_level ();
6984 fixup_args_size_notes (prev, get_last_insn (), 0);
6987 /* Emit code to save the current value of stack. */
6989 static rtx
6990 expand_stack_save (void)
6992 rtx ret = NULL_RTX;
6994 emit_stack_save (SAVE_BLOCK, &ret);
6995 return ret;
6998 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7000 static rtx
7001 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7003 const char *name;
7004 rtx fallback_retval;
7005 rtx_insn *(*gen_fn) (rtx, rtx);
7006 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7008 case BUILT_IN_GOACC_PARLEVEL_ID:
7009 name = "__builtin_goacc_parlevel_id";
7010 fallback_retval = const0_rtx;
7011 gen_fn = targetm.gen_oacc_dim_pos;
7012 break;
7013 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7014 name = "__builtin_goacc_parlevel_size";
7015 fallback_retval = const1_rtx;
7016 gen_fn = targetm.gen_oacc_dim_size;
7017 break;
7018 default:
7019 gcc_unreachable ();
7022 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7024 error ("%qs only supported in OpenACC code", name);
7025 return const0_rtx;
7028 tree arg = CALL_EXPR_ARG (exp, 0);
7029 if (TREE_CODE (arg) != INTEGER_CST)
7031 error ("non-constant argument 0 to %qs", name);
7032 return const0_rtx;
7035 int dim = TREE_INT_CST_LOW (arg);
7036 switch (dim)
7038 case GOMP_DIM_GANG:
7039 case GOMP_DIM_WORKER:
7040 case GOMP_DIM_VECTOR:
7041 break;
7042 default:
7043 error ("illegal argument 0 to %qs", name);
7044 return const0_rtx;
7047 if (ignore)
7048 return target;
7050 if (target == NULL_RTX)
7051 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7053 if (!targetm.have_oacc_dim_size ())
7055 emit_move_insn (target, fallback_retval);
7056 return target;
7059 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7060 emit_insn (gen_fn (reg, GEN_INT (dim)));
7061 if (reg != target)
7062 emit_move_insn (target, reg);
7064 return target;
7067 /* Expand a string compare operation using a sequence of char comparisons
7068 to avoid the call overhead, with the result going to TARGET if
7069 that's convenient.
7071 VAR_STR is the variable string source;
7072 CONST_STR is the constant string source;
7073 LENGTH is the number of chars to compare;
7074 CONST_STR_N indicates which source string is the constant string;
7075 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7077 The call expands to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7079 target = (int) (unsigned char) var_str[0]
7080 - (int) (unsigned char) const_str[0];
7081 if (target != 0)
7082 goto ne_label;
7084 target = (int) (unsigned char) var_str[length - 2]
7085 - (int) (unsigned char) const_str[length - 2];
7086 if (target != 0)
7087 goto ne_label;
7088 target = (int) (unsigned char) var_str[length - 1]
7089 - (int) (unsigned char) const_str[length - 1];
7090 ne_label:
7093 static rtx
7094 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7095 unsigned HOST_WIDE_INT length,
7096 int const_str_n, machine_mode mode)
7098 HOST_WIDE_INT offset = 0;
7099 rtx var_rtx_array
7100 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7101 rtx var_rtx = NULL_RTX;
7102 rtx const_rtx = NULL_RTX;
7103 rtx result = target ? target : gen_reg_rtx (mode);
7104 rtx_code_label *ne_label = gen_label_rtx ();
7105 tree unit_type_node = unsigned_char_type_node;
7106 scalar_int_mode unit_mode
7107 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7109 start_sequence ();
7111 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7113 var_rtx
7114 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7115 const_rtx = c_readstr (const_str + offset, unit_mode);
7116 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7117 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7119 op0 = convert_modes (mode, unit_mode, op0, 1);
7120 op1 = convert_modes (mode, unit_mode, op1, 1);
7121 result = expand_simple_binop (mode, MINUS, op0, op1,
7122 result, 1, OPTAB_WIDEN);
7123 if (i < length - 1)
7124 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7125 mode, true, ne_label);
7126 offset += GET_MODE_SIZE (unit_mode);
7129 emit_label (ne_label);
7130 rtx_insn *insns = get_insns ();
7131 end_sequence ();
7132 emit_insn (insns);
7134 return result;
7137 /* Inline expansion of a call to str(n)cmp (or memcmp), with the result
7138 going to TARGET if that's convenient.
7139 If the call is not inlined, return NULL_RTX. */
7140 static rtx
7141 inline_expand_builtin_string_cmp (tree exp, rtx target)
7143 tree fndecl = get_callee_fndecl (exp);
7144 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7145 unsigned HOST_WIDE_INT length = 0;
7146 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7148 /* Do NOT apply this inlining expansion when optimizing for size or
7149 when the optimization level is below 2. */
7150 if (optimize < 2 || optimize_insn_for_size_p ())
7151 return NULL_RTX;
7153 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7154 || fcode == BUILT_IN_STRNCMP
7155 || fcode == BUILT_IN_MEMCMP);
7157 /* On a target where the type of the call (int) has the same or narrower
7158 precision than unsigned char, give up on the inlining expansion. */
7159 if (TYPE_PRECISION (unsigned_char_type_node)
7160 >= TYPE_PRECISION (TREE_TYPE (exp)))
7161 return NULL_RTX;
7163 tree arg1 = CALL_EXPR_ARG (exp, 0);
7164 tree arg2 = CALL_EXPR_ARG (exp, 1);
7165 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7167 unsigned HOST_WIDE_INT len1 = 0;
7168 unsigned HOST_WIDE_INT len2 = 0;
7169 unsigned HOST_WIDE_INT len3 = 0;
7171 const char *src_str1 = c_getstr (arg1, &len1);
7172 const char *src_str2 = c_getstr (arg2, &len2);
7174 /* If neither string is a constant string, the call does not qualify. */
7175 if (!src_str1 && !src_str2)
7176 return NULL_RTX;
7178 /* For strncmp, if the length is not a constant, the call does not qualify. */
7179 if (is_ncmp)
7181 if (!tree_fits_uhwi_p (len3_tree))
7182 return NULL_RTX;
7183 else
7184 len3 = tree_to_uhwi (len3_tree);
7187 if (src_str1 != NULL)
7188 len1 = strnlen (src_str1, len1) + 1;
7190 if (src_str2 != NULL)
7191 len2 = strnlen (src_str2, len2) + 1;
7193 int const_str_n = 0;
7194 if (!len1)
7195 const_str_n = 2;
7196 else if (!len2)
7197 const_str_n = 1;
7198 else if (len2 > len1)
7199 const_str_n = 1;
7200 else
7201 const_str_n = 2;
7203 gcc_checking_assert (const_str_n > 0);
7204 length = (const_str_n == 1) ? len1 : len2;
7206 if (is_ncmp && len3 < length)
7207 length = len3;
7209 /* If the length of the comparison is larger than the threshold,
7210 do nothing. */
7211 if (length > (unsigned HOST_WIDE_INT)
7212 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7213 return NULL_RTX;
7215 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7217 /* Now, start the inline expansion of the call. */
7218 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7219 (const_str_n == 1) ? src_str1 : src_str2, length,
7220 const_str_n, mode);
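/* For example, at -O2 with the default
   --param builtin-string-cmp-inline-length, a call such as

     int cmp (const char *s) { return strcmp (s, "hi"); }

   qualifies: one argument is a constant string and the comparison
   length (3, counting the terminating nul) is within the threshold,
   so inline_string_cmp above replaces the library call with byte
   subtractions and early-exit jumps.  */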
7223 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7224 represents the size of the first argument to that call, or VOIDmode
7225 if the argument is a pointer. IGNORE will be true if the result
7226 isn't used. */
7227 static rtx
7228 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7229 bool ignore)
7231 rtx val, failsafe;
7232 unsigned nargs = call_expr_nargs (exp);
7234 tree arg0 = CALL_EXPR_ARG (exp, 0);
7236 if (mode == VOIDmode)
7238 mode = TYPE_MODE (TREE_TYPE (arg0));
7239 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7242 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7244 /* An optional second argument can be used as a failsafe value on
7245 some machines. If it isn't present, then the failsafe value is
7246 assumed to be 0. */
7247 if (nargs > 1)
7249 tree arg1 = CALL_EXPR_ARG (exp, 1);
7250 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7252 else
7253 failsafe = const0_rtx;
7255 /* If the result isn't used, the behavior is undefined. It would be
7256 nice to emit a warning here, but path splitting means this might
7257 happen with legitimate code. So simply drop the builtin
7258 expansion in that case; we've handled any side-effects above. */
7259 if (ignore)
7260 return const0_rtx;
7262 /* If we don't have a suitable target, create one to hold the result. */
7263 if (target == NULL || GET_MODE (target) != mode)
7264 target = gen_reg_rtx (mode);
7266 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7267 val = convert_modes (mode, VOIDmode, val, false);
7269 return targetm.speculation_safe_value (mode, target, val, failsafe);
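/* For example, the builtin is intended to harden a bounds check
   against speculative execution, along the lines of (ARRAY, N and
   UNTRUSTED_INDEX are illustrative):

     int load_elem (int *array, unsigned n, unsigned untrusted_index)
     {
       if (untrusted_index < n)
         return array[__builtin_speculation_safe_value (untrusted_index)];
       return 0;
     }

   where the target hook forces the index to the failsafe value (0
   here) on a mis-speculated path.  */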
7272 /* Expand an expression EXP that calls a built-in function,
7273 with result going to TARGET if that's convenient
7274 (and in mode MODE if that's convenient).
7275 SUBTARGET may be used as the target for computing one of EXP's operands.
7276 IGNORE is nonzero if the value is to be ignored. */
7278 rtx
7279 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7280 int ignore)
7282 tree fndecl = get_callee_fndecl (exp);
7283 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7284 int flags;
7286 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7287 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7289 /* When ASan is enabled, we don't want to expand some memory/string
7290 builtins and instead rely on libsanitizer's hooks. This allows us to
7291 avoid redundant checks and be sure that a possible overflow will be
7292 detected by ASan. */
7294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7295 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7296 return expand_call (exp, target, ignore);
7298 /* When not optimizing, generate calls to library functions for a certain
7299 set of builtins. */
7300 if (!optimize
7301 && !called_as_built_in (fndecl)
7302 && fcode != BUILT_IN_FORK
7303 && fcode != BUILT_IN_EXECL
7304 && fcode != BUILT_IN_EXECV
7305 && fcode != BUILT_IN_EXECLP
7306 && fcode != BUILT_IN_EXECLE
7307 && fcode != BUILT_IN_EXECVP
7308 && fcode != BUILT_IN_EXECVE
7309 && !ALLOCA_FUNCTION_CODE_P (fcode)
7310 && fcode != BUILT_IN_FREE)
7311 return expand_call (exp, target, ignore);
7313 /* The built-in function expanders test for target == const0_rtx
7314 to determine whether the function's result will be ignored. */
7315 if (ignore)
7316 target = const0_rtx;
7318 /* If the result of a pure or const built-in function is ignored, and
7319 none of its arguments are volatile, we can avoid expanding the
7320 built-in call and just evaluate the arguments for side-effects. */
7321 if (target == const0_rtx
7322 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7323 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7325 bool volatilep = false;
7326 tree arg;
7327 call_expr_arg_iterator iter;
7329 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7330 if (TREE_THIS_VOLATILE (arg))
7332 volatilep = true;
7333 break;
7336 if (! volatilep)
7338 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7339 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7340 return const0_rtx;
7344 switch (fcode)
7346 CASE_FLT_FN (BUILT_IN_FABS):
7347 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7348 case BUILT_IN_FABSD32:
7349 case BUILT_IN_FABSD64:
7350 case BUILT_IN_FABSD128:
7351 target = expand_builtin_fabs (exp, target, subtarget);
7352 if (target)
7353 return target;
7354 break;
7356 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7357 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7358 target = expand_builtin_copysign (exp, target, subtarget);
7359 if (target)
7360 return target;
7361 break;
7363 /* Just do a normal library call if we were unable to fold
7364 the values. */
7365 CASE_FLT_FN (BUILT_IN_CABS):
7366 break;
7368 CASE_FLT_FN (BUILT_IN_FMA):
7369 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7370 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7371 if (target)
7372 return target;
7373 break;
7375 CASE_FLT_FN (BUILT_IN_ILOGB):
7376 if (! flag_unsafe_math_optimizations)
7377 break;
7378 gcc_fallthrough ();
7379 CASE_FLT_FN (BUILT_IN_ISINF):
7380 CASE_FLT_FN (BUILT_IN_FINITE):
7381 case BUILT_IN_ISFINITE:
7382 case BUILT_IN_ISNORMAL:
7383 target = expand_builtin_interclass_mathfn (exp, target);
7384 if (target)
7385 return target;
7386 break;
7388 CASE_FLT_FN (BUILT_IN_ICEIL):
7389 CASE_FLT_FN (BUILT_IN_LCEIL):
7390 CASE_FLT_FN (BUILT_IN_LLCEIL):
7391 CASE_FLT_FN (BUILT_IN_LFLOOR):
7392 CASE_FLT_FN (BUILT_IN_IFLOOR):
7393 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7394 target = expand_builtin_int_roundingfn (exp, target);
7395 if (target)
7396 return target;
7397 break;
7399 CASE_FLT_FN (BUILT_IN_IRINT):
7400 CASE_FLT_FN (BUILT_IN_LRINT):
7401 CASE_FLT_FN (BUILT_IN_LLRINT):
7402 CASE_FLT_FN (BUILT_IN_IROUND):
7403 CASE_FLT_FN (BUILT_IN_LROUND):
7404 CASE_FLT_FN (BUILT_IN_LLROUND):
7405 target = expand_builtin_int_roundingfn_2 (exp, target);
7406 if (target)
7407 return target;
7408 break;
7410 CASE_FLT_FN (BUILT_IN_POWI):
7411 target = expand_builtin_powi (exp, target);
7412 if (target)
7413 return target;
7414 break;
7416 CASE_FLT_FN (BUILT_IN_CEXPI):
7417 target = expand_builtin_cexpi (exp, target);
7418 gcc_assert (target);
7419 return target;
7421 CASE_FLT_FN (BUILT_IN_SIN):
7422 CASE_FLT_FN (BUILT_IN_COS):
7423 if (! flag_unsafe_math_optimizations)
7424 break;
7425 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7426 if (target)
7427 return target;
7428 break;
7430 CASE_FLT_FN (BUILT_IN_SINCOS):
7431 if (! flag_unsafe_math_optimizations)
7432 break;
7433 target = expand_builtin_sincos (exp);
7434 if (target)
7435 return target;
7436 break;
7438 case BUILT_IN_APPLY_ARGS:
7439 return expand_builtin_apply_args ();
7441 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7442 FUNCTION with a copy of the parameters described by
7443 ARGUMENTS, and ARGSIZE. It returns a block of memory
7444 allocated on the stack into which are stored all the registers
7445 that might possibly be used for returning the result of a
7446 function. ARGUMENTS is the value returned by
7447 __builtin_apply_args. ARGSIZE is the number of bytes of
7448 arguments that must be copied. ??? How should this value be
7449 computed? We'll also need a safe worst case value for varargs
7450 functions. */
7451 case BUILT_IN_APPLY:
7452 if (!validate_arglist (exp, POINTER_TYPE,
7453 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7454 && !validate_arglist (exp, REFERENCE_TYPE,
7455 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7456 return const0_rtx;
7457 else
7459 rtx ops[3];
7461 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7462 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7463 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7465 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7468 /* __builtin_return (RESULT) causes the function to return the
7469 value described by RESULT. RESULT is address of the block of
7470 memory returned by __builtin_apply. */
7471 case BUILT_IN_RETURN:
7472 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7473 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7474 return const0_rtx;
7476 case BUILT_IN_SAVEREGS:
7477 return expand_builtin_saveregs ();
7479 case BUILT_IN_VA_ARG_PACK:
7480 /* All valid uses of __builtin_va_arg_pack () are removed during
7481 inlining. */
7482 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7483 return const0_rtx;
7485 case BUILT_IN_VA_ARG_PACK_LEN:
7486 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7487 inlining. */
7488 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7489 return const0_rtx;
7491 /* Return the address of the first anonymous stack arg. */
7492 case BUILT_IN_NEXT_ARG:
7493 if (fold_builtin_next_arg (exp, false))
7494 return const0_rtx;
7495 return expand_builtin_next_arg ();
7497 case BUILT_IN_CLEAR_CACHE:
7498 target = expand_builtin___clear_cache (exp);
7499 if (target)
7500 return target;
7501 break;
7503 case BUILT_IN_CLASSIFY_TYPE:
7504 return expand_builtin_classify_type (exp);
7506 case BUILT_IN_CONSTANT_P:
7507 return const0_rtx;
7509 case BUILT_IN_FRAME_ADDRESS:
7510 case BUILT_IN_RETURN_ADDRESS:
7511 return expand_builtin_frame_address (fndecl, exp);
7513 /* Return the address of the area where the structure value is
7514 returned, or 0 otherwise. */
7515 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7516 if (call_expr_nargs (exp) != 0
7517 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7518 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7519 return const0_rtx;
7520 else
7521 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7523 CASE_BUILT_IN_ALLOCA:
7524 target = expand_builtin_alloca (exp);
7525 if (target)
7526 return target;
7527 break;
7529 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7530 return expand_asan_emit_allocas_unpoison (exp);
7532 case BUILT_IN_STACK_SAVE:
7533 return expand_stack_save ();
7535 case BUILT_IN_STACK_RESTORE:
7536 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7537 return const0_rtx;
7539 case BUILT_IN_BSWAP16:
7540 case BUILT_IN_BSWAP32:
7541 case BUILT_IN_BSWAP64:
7542 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7543 if (target)
7544 return target;
7545 break;
7547 CASE_INT_FN (BUILT_IN_FFS):
7548 target = expand_builtin_unop (target_mode, exp, target,
7549 subtarget, ffs_optab);
7550 if (target)
7551 return target;
7552 break;
7554 CASE_INT_FN (BUILT_IN_CLZ):
7555 target = expand_builtin_unop (target_mode, exp, target,
7556 subtarget, clz_optab);
7557 if (target)
7558 return target;
7559 break;
7561 CASE_INT_FN (BUILT_IN_CTZ):
7562 target = expand_builtin_unop (target_mode, exp, target,
7563 subtarget, ctz_optab);
7564 if (target)
7565 return target;
7566 break;
7568 CASE_INT_FN (BUILT_IN_CLRSB):
7569 target = expand_builtin_unop (target_mode, exp, target,
7570 subtarget, clrsb_optab);
7571 if (target)
7572 return target;
7573 break;
7575 CASE_INT_FN (BUILT_IN_POPCOUNT):
7576 target = expand_builtin_unop (target_mode, exp, target,
7577 subtarget, popcount_optab);
7578 if (target)
7579 return target;
7580 break;
7582 CASE_INT_FN (BUILT_IN_PARITY):
7583 target = expand_builtin_unop (target_mode, exp, target,
7584 subtarget, parity_optab);
7585 if (target)
7586 return target;
7587 break;
7589 case BUILT_IN_STRLEN:
7590 target = expand_builtin_strlen (exp, target, target_mode);
7591 if (target)
7592 return target;
7593 break;
7595 case BUILT_IN_STRNLEN:
7596 target = expand_builtin_strnlen (exp, target, target_mode);
7597 if (target)
7598 return target;
7599 break;
7601 case BUILT_IN_STRCAT:
7602 target = expand_builtin_strcat (exp, target);
7603 if (target)
7604 return target;
7605 break;
7607 case BUILT_IN_STRCPY:
7608 target = expand_builtin_strcpy (exp, target);
7609 if (target)
7610 return target;
7611 break;
7613 case BUILT_IN_STRNCAT:
7614 target = expand_builtin_strncat (exp, target);
7615 if (target)
7616 return target;
7617 break;
7619 case BUILT_IN_STRNCPY:
7620 target = expand_builtin_strncpy (exp, target);
7621 if (target)
7622 return target;
7623 break;
7625 case BUILT_IN_STPCPY:
7626 target = expand_builtin_stpcpy (exp, target, mode);
7627 if (target)
7628 return target;
7629 break;
7631 case BUILT_IN_STPNCPY:
7632 target = expand_builtin_stpncpy (exp, target);
7633 if (target)
7634 return target;
7635 break;
7637 case BUILT_IN_MEMCHR:
7638 target = expand_builtin_memchr (exp, target);
7639 if (target)
7640 return target;
7641 break;
7643 case BUILT_IN_MEMCPY:
7644 target = expand_builtin_memcpy (exp, target);
7645 if (target)
7646 return target;
7647 break;
7649 case BUILT_IN_MEMMOVE:
7650 target = expand_builtin_memmove (exp, target);
7651 if (target)
7652 return target;
7653 break;
7655 case BUILT_IN_MEMPCPY:
7656 target = expand_builtin_mempcpy (exp, target);
7657 if (target)
7658 return target;
7659 break;
7661 case BUILT_IN_MEMSET:
7662 target = expand_builtin_memset (exp, target, mode);
7663 if (target)
7664 return target;
7665 break;
7667 case BUILT_IN_BZERO:
7668 target = expand_builtin_bzero (exp);
7669 if (target)
7670 return target;
7671 break;
7673 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7674 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7675 when changing it to a strcmp call. */
7676 case BUILT_IN_STRCMP_EQ:
7677 target = expand_builtin_memcmp (exp, target, true);
7678 if (target)
7679 return target;
7681 /* Change this call back to a BUILT_IN_STRCMP. */
7682 TREE_OPERAND (exp, 1)
7683 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7685 /* Delete the last parameter. */
7686 unsigned int i;
7687 vec<tree, va_gc> *arg_vec;
7688 vec_alloc (arg_vec, 2);
7689 for (i = 0; i < 2; i++)
7690 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7691 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7692 /* FALLTHROUGH */
7694 case BUILT_IN_STRCMP:
7695 target = expand_builtin_strcmp (exp, target);
7696 if (target)
7697 return target;
7698 break;
7700 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7701 back to a BUILT_IN_STRNCMP. */
7702 case BUILT_IN_STRNCMP_EQ:
7703 target = expand_builtin_memcmp (exp, target, true);
7704 if (target)
7705 return target;
7707 /* Change it back to a BUILT_IN_STRNCMP. */
7708 TREE_OPERAND (exp, 1)
7709 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7710 /* FALLTHROUGH */
7712 case BUILT_IN_STRNCMP:
7713 target = expand_builtin_strncmp (exp, target, mode);
7714 if (target)
7715 return target;
7716 break;
7718 case BUILT_IN_BCMP:
7719 case BUILT_IN_MEMCMP:
7720 case BUILT_IN_MEMCMP_EQ:
7721 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7722 if (target)
7723 return target;
7724 if (fcode == BUILT_IN_MEMCMP_EQ)
7726 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7727 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7729 break;
7731 case BUILT_IN_SETJMP:
7732 /* This should have been lowered to the builtins below. */
7733 gcc_unreachable ();
7735 case BUILT_IN_SETJMP_SETUP:
7736 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7737 and the receiver label. */
7738 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7740 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7741 VOIDmode, EXPAND_NORMAL);
7742 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7743 rtx_insn *label_r = label_rtx (label);
7745 /* This is copied from the handling of non-local gotos. */
7746 expand_builtin_setjmp_setup (buf_addr, label_r);
7747 nonlocal_goto_handler_labels
7748 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7749 nonlocal_goto_handler_labels);
7750 /* ??? Do not let expand_label treat us as such since we would
7751 not want to be both on the list of non-local labels and on
7752 the list of forced labels. */
7753 FORCED_LABEL (label) = 0;
7754 return const0_rtx;
7756 break;
7758 case BUILT_IN_SETJMP_RECEIVER:
7759 /* __builtin_setjmp_receiver is passed the receiver label. */
7760 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7762 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7763 rtx_insn *label_r = label_rtx (label);
7765 expand_builtin_setjmp_receiver (label_r);
7766 return const0_rtx;
7768 break;
7770 /* __builtin_longjmp is passed a pointer to an array of five words.
7771 It's similar to the C library longjmp function but works with
7772 __builtin_setjmp above. */
7773 case BUILT_IN_LONGJMP:
7774 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7776 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7777 VOIDmode, EXPAND_NORMAL);
7778 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7780 if (value != const1_rtx)
7782 error ("%<__builtin_longjmp%> second argument must be 1");
7783 return const0_rtx;
7786 expand_builtin_longjmp (buf_addr, value);
7787 return const0_rtx;
7789 break;
7791 case BUILT_IN_NONLOCAL_GOTO:
7792 target = expand_builtin_nonlocal_goto (exp);
7793 if (target)
7794 return target;
7795 break;
7797 /* This updates the setjmp buffer that is its argument with the value
7798 of the current stack pointer. */
7799 case BUILT_IN_UPDATE_SETJMP_BUF:
7800 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7802 rtx buf_addr
7803 = expand_normal (CALL_EXPR_ARG (exp, 0));
7805 expand_builtin_update_setjmp_buf (buf_addr);
7806 return const0_rtx;
7808 break;
7810 case BUILT_IN_TRAP:
7811 expand_builtin_trap ();
7812 return const0_rtx;
7814 case BUILT_IN_UNREACHABLE:
7815 expand_builtin_unreachable ();
7816 return const0_rtx;
7818 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7819 case BUILT_IN_SIGNBITD32:
7820 case BUILT_IN_SIGNBITD64:
7821 case BUILT_IN_SIGNBITD128:
7822 target = expand_builtin_signbit (exp, target);
7823 if (target)
7824 return target;
7825 break;
7827 /* Various hooks for the DWARF 2 __throw routine. */
7828 case BUILT_IN_UNWIND_INIT:
7829 expand_builtin_unwind_init ();
7830 return const0_rtx;
7831 case BUILT_IN_DWARF_CFA:
7832 return virtual_cfa_rtx;
7833 #ifdef DWARF2_UNWIND_INFO
7834 case BUILT_IN_DWARF_SP_COLUMN:
7835 return expand_builtin_dwarf_sp_column ();
7836 case BUILT_IN_INIT_DWARF_REG_SIZES:
7837 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7838 return const0_rtx;
7839 #endif
7840 case BUILT_IN_FROB_RETURN_ADDR:
7841 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7842 case BUILT_IN_EXTRACT_RETURN_ADDR:
7843 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7844 case BUILT_IN_EH_RETURN:
7845 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7846 CALL_EXPR_ARG (exp, 1));
7847 return const0_rtx;
7848 case BUILT_IN_EH_RETURN_DATA_REGNO:
7849 return expand_builtin_eh_return_data_regno (exp);
7850 case BUILT_IN_EXTEND_POINTER:
7851 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7852 case BUILT_IN_EH_POINTER:
7853 return expand_builtin_eh_pointer (exp);
7854 case BUILT_IN_EH_FILTER:
7855 return expand_builtin_eh_filter (exp);
7856 case BUILT_IN_EH_COPY_VALUES:
7857 return expand_builtin_eh_copy_values (exp);
7859 case BUILT_IN_VA_START:
7860 return expand_builtin_va_start (exp);
7861 case BUILT_IN_VA_END:
7862 return expand_builtin_va_end (exp);
7863 case BUILT_IN_VA_COPY:
7864 return expand_builtin_va_copy (exp);
7865 case BUILT_IN_EXPECT:
7866 return expand_builtin_expect (exp, target);
7867 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7868 return expand_builtin_expect_with_probability (exp, target);
7869 case BUILT_IN_ASSUME_ALIGNED:
7870 return expand_builtin_assume_aligned (exp, target);
7871 case BUILT_IN_PREFETCH:
7872 expand_builtin_prefetch (exp);
7873 return const0_rtx;
7875 case BUILT_IN_INIT_TRAMPOLINE:
7876 return expand_builtin_init_trampoline (exp, true);
7877 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7878 return expand_builtin_init_trampoline (exp, false);
7879 case BUILT_IN_ADJUST_TRAMPOLINE:
7880 return expand_builtin_adjust_trampoline (exp);
7882 case BUILT_IN_INIT_DESCRIPTOR:
7883 return expand_builtin_init_descriptor (exp);
7884 case BUILT_IN_ADJUST_DESCRIPTOR:
7885 return expand_builtin_adjust_descriptor (exp);
7887 case BUILT_IN_FORK:
7888 case BUILT_IN_EXECL:
7889 case BUILT_IN_EXECV:
7890 case BUILT_IN_EXECLP:
7891 case BUILT_IN_EXECLE:
7892 case BUILT_IN_EXECVP:
7893 case BUILT_IN_EXECVE:
7894 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7895 if (target)
7896 return target;
7897 break;
7899 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7900 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7901 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7902 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7903 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7904 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7905 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7906 if (target)
7907 return target;
7908 break;
7910 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7911 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7912 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7913 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7914 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7915 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7916 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7917 if (target)
7918 return target;
7919 break;
7921 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7922 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7923 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7924 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7925 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7926 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7927 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7928 if (target)
7929 return target;
7930 break;
7932 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7933 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7934 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7935 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7936 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7937 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7938 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7939 if (target)
7940 return target;
7941 break;
7943 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7944 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7945 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7946 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7947 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7948 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7949 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7950 if (target)
7951 return target;
7952 break;
7954 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7955 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7956 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7957 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7958 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7959 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7960 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7961 if (target)
7962 return target;
7963 break;
7965 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7966 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7967 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7968 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7969 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7970 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7971 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7972 if (target)
7973 return target;
7974 break;
7976 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7977 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7978 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7979 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7980 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7981 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7982 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7983 if (target)
7984 return target;
7985 break;
7987 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7988 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7989 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7990 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7991 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7992 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7993 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7994 if (target)
7995 return target;
7996 break;
7998 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7999 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8000 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8001 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8002 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8003 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8004 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8010 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8011 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8012 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8013 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8014 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8015 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8016 if (target)
8017 return target;
8018 break;
8020 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8021 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8022 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8023 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8024 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8025 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8026 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8027 if (target)
8028 return target;
8029 break;
8031 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8032 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8033 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8034 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8035 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8036 if (mode == VOIDmode)
8037 mode = TYPE_MODE (boolean_type_node);
8038 if (!target || !register_operand (target, mode))
8039 target = gen_reg_rtx (mode);
8041 mode = get_builtin_sync_mode
8042 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8043 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8044 if (target)
8045 return target;
8046 break;
8048 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8049 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8050 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8051 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8052 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8053 mode = get_builtin_sync_mode
8054 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8055 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8056 if (target)
8057 return target;
8058 break;
8060 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8061 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8062 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8063 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8064 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8065 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8066 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8067 if (target)
8068 return target;
8069 break;
8071 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8072 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8073 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8074 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8075 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8076 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8077 expand_builtin_sync_lock_release (mode, exp);
8078 return const0_rtx;
8080 case BUILT_IN_SYNC_SYNCHRONIZE:
8081 expand_builtin_sync_synchronize ();
8082 return const0_rtx;
8084 case BUILT_IN_ATOMIC_EXCHANGE_1:
8085 case BUILT_IN_ATOMIC_EXCHANGE_2:
8086 case BUILT_IN_ATOMIC_EXCHANGE_4:
8087 case BUILT_IN_ATOMIC_EXCHANGE_8:
8088 case BUILT_IN_ATOMIC_EXCHANGE_16:
8089 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8090 target = expand_builtin_atomic_exchange (mode, exp, target);
8091 if (target)
8092 return target;
8093 break;
8095 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8096 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8097 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8098 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8099 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8101 unsigned int nargs, z;
8102 vec<tree, va_gc> *vec;
8104 mode =
8105 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8106 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8107 if (target)
8108 return target;
8110 /* If this is turned into an external library call, the weak parameter
8111 must be dropped to match the expected parameter list. */
8112 nargs = call_expr_nargs (exp);
8113 vec_alloc (vec, nargs - 1);
8114 for (z = 0; z < 3; z++)
8115 vec->quick_push (CALL_EXPR_ARG (exp, z));
8116 /* Skip the boolean weak parameter. */
8117 for (z = 4; z < 6; z++)
8118 vec->quick_push (CALL_EXPR_ARG (exp, z));
8119 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
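/* The rebuilt EXP falls through to the expand_call at the end of
   this function, so a call that cannot be expanded inline becomes
   e.g. __atomic_compare_exchange_N (ptr, expected, desired, success,
   failure), without the weak flag, as the library expects.  */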
8120 break;
8123 case BUILT_IN_ATOMIC_LOAD_1:
8124 case BUILT_IN_ATOMIC_LOAD_2:
8125 case BUILT_IN_ATOMIC_LOAD_4:
8126 case BUILT_IN_ATOMIC_LOAD_8:
8127 case BUILT_IN_ATOMIC_LOAD_16:
8128 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8129 target = expand_builtin_atomic_load (mode, exp, target);
8130 if (target)
8131 return target;
8132 break;
8134 case BUILT_IN_ATOMIC_STORE_1:
8135 case BUILT_IN_ATOMIC_STORE_2:
8136 case BUILT_IN_ATOMIC_STORE_4:
8137 case BUILT_IN_ATOMIC_STORE_8:
8138 case BUILT_IN_ATOMIC_STORE_16:
8139 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8140 target = expand_builtin_atomic_store (mode, exp);
8141 if (target)
8142 return const0_rtx;
8143 break;
8145 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8146 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8147 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8148 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8149 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8151 enum built_in_function lib;
8152 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8153 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8154 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8155 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8156 ignore, lib);
8157 if (target)
8158 return target;
8159 break;
8161 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8162 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8163 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8164 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8165 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8167 enum built_in_function lib;
8168 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8169 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8170 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8171 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8172 ignore, lib);
8173 if (target)
8174 return target;
8175 break;
8177 case BUILT_IN_ATOMIC_AND_FETCH_1:
8178 case BUILT_IN_ATOMIC_AND_FETCH_2:
8179 case BUILT_IN_ATOMIC_AND_FETCH_4:
8180 case BUILT_IN_ATOMIC_AND_FETCH_8:
8181 case BUILT_IN_ATOMIC_AND_FETCH_16:
8183 enum built_in_function lib;
8184 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8185 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8186 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8187 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8188 ignore, lib);
8189 if (target)
8190 return target;
8191 break;
8193 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8194 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8195 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8196 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8197 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8199 enum built_in_function lib;
8200 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8201 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8202 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8203 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8204 ignore, lib);
8205 if (target)
8206 return target;
8207 break;
8209 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8210 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8211 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8212 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8213 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8215 enum built_in_function lib;
8216 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8217 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8218 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8219 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8220 ignore, lib);
8221 if (target)
8222 return target;
8223 break;
8225 case BUILT_IN_ATOMIC_OR_FETCH_1:
8226 case BUILT_IN_ATOMIC_OR_FETCH_2:
8227 case BUILT_IN_ATOMIC_OR_FETCH_4:
8228 case BUILT_IN_ATOMIC_OR_FETCH_8:
8229 case BUILT_IN_ATOMIC_OR_FETCH_16:
8231 enum built_in_function lib;
8232 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8233 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8234 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8235 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8236 ignore, lib);
8237 if (target)
8238 return target;
8239 break;
8241 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8242 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8243 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8244 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8245 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8246 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8247 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8248 ignore, BUILT_IN_NONE);
8249 if (target)
8250 return target;
8251 break;
8253 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8254 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8255 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8256 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8257 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8258 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8259 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8260 ignore, BUILT_IN_NONE);
8261 if (target)
8262 return target;
8263 break;
8265 case BUILT_IN_ATOMIC_FETCH_AND_1:
8266 case BUILT_IN_ATOMIC_FETCH_AND_2:
8267 case BUILT_IN_ATOMIC_FETCH_AND_4:
8268 case BUILT_IN_ATOMIC_FETCH_AND_8:
8269 case BUILT_IN_ATOMIC_FETCH_AND_16:
8270 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8271 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8272 ignore, BUILT_IN_NONE);
8273 if (target)
8274 return target;
8275 break;
8277 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8278 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8279 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8280 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8281 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8282 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8283 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8284 ignore, BUILT_IN_NONE);
8285 if (target)
8286 return target;
8287 break;
8289 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8290 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8291 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8292 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8293 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8294 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8295 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8296 ignore, BUILT_IN_NONE);
8297 if (target)
8298 return target;
8299 break;
8301 case BUILT_IN_ATOMIC_FETCH_OR_1:
8302 case BUILT_IN_ATOMIC_FETCH_OR_2:
8303 case BUILT_IN_ATOMIC_FETCH_OR_4:
8304 case BUILT_IN_ATOMIC_FETCH_OR_8:
8305 case BUILT_IN_ATOMIC_FETCH_OR_16:
8306 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8307 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8308 ignore, BUILT_IN_NONE);
8309 if (target)
8310 return target;
8311 break;
8313 case BUILT_IN_ATOMIC_TEST_AND_SET:
8314 return expand_builtin_atomic_test_and_set (exp, target);
8316 case BUILT_IN_ATOMIC_CLEAR:
8317 return expand_builtin_atomic_clear (exp);
8319 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8320 return expand_builtin_atomic_always_lock_free (exp);
8322 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8323 target = expand_builtin_atomic_is_lock_free (exp);
8324 if (target)
8325 return target;
8326 break;
8328 case BUILT_IN_ATOMIC_THREAD_FENCE:
8329 expand_builtin_atomic_thread_fence (exp);
8330 return const0_rtx;
8332 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8333 expand_builtin_atomic_signal_fence (exp);
8334 return const0_rtx;
8336 case BUILT_IN_OBJECT_SIZE:
8337 return expand_builtin_object_size (exp);
8339 case BUILT_IN_MEMCPY_CHK:
8340 case BUILT_IN_MEMPCPY_CHK:
8341 case BUILT_IN_MEMMOVE_CHK:
8342 case BUILT_IN_MEMSET_CHK:
8343 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8344 if (target)
8345 return target;
8346 break;
8348 case BUILT_IN_STRCPY_CHK:
8349 case BUILT_IN_STPCPY_CHK:
8350 case BUILT_IN_STRNCPY_CHK:
8351 case BUILT_IN_STPNCPY_CHK:
8352 case BUILT_IN_STRCAT_CHK:
8353 case BUILT_IN_STRNCAT_CHK:
8354 case BUILT_IN_SNPRINTF_CHK:
8355 case BUILT_IN_VSNPRINTF_CHK:
8356 maybe_emit_chk_warning (exp, fcode);
8357 break;
8359 case BUILT_IN_SPRINTF_CHK:
8360 case BUILT_IN_VSPRINTF_CHK:
8361 maybe_emit_sprintf_chk_warning (exp, fcode);
8362 break;
8364 case BUILT_IN_FREE:
8365 if (warn_free_nonheap_object)
8366 maybe_emit_free_warning (exp);
8367 break;
8369 case BUILT_IN_THREAD_POINTER:
8370 return expand_builtin_thread_pointer (exp, target);
8372 case BUILT_IN_SET_THREAD_POINTER:
8373 expand_builtin_set_thread_pointer (exp);
8374 return const0_rtx;
8376 case BUILT_IN_ACC_ON_DEVICE:
8377 /* Do a library call if we failed to expand the builtin when
8378 folding. */
8379 break;
8381 case BUILT_IN_GOACC_PARLEVEL_ID:
8382 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8383 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8385 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8386 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8388 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8389 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8390 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8391 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8392 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8394 return expand_speculation_safe_value (mode, exp, target, ignore);
8396 default: /* Just do a library call if the builtin is unknown. */
8397 break;
8400 /* The switch statement above can drop through to cause the function
8401 to be called normally. */
8402 return expand_call (exp, target, ignore);
8405 /* Determine whether a tree node represents a call to a built-in
8406 function. If the tree T is a call to a built-in function with
8407 the right number of arguments of the appropriate types, return
8408 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8409 Otherwise the return value is END_BUILTINS. */
8411 enum built_in_function
8412 builtin_mathfn_code (const_tree t)
8414 const_tree fndecl, arg, parmlist;
8415 const_tree argtype, parmtype;
8416 const_call_expr_arg_iterator iter;
8418 if (TREE_CODE (t) != CALL_EXPR)
8419 return END_BUILTINS;
8421 fndecl = get_callee_fndecl (t);
8422 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8423 return END_BUILTINS;
8425 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8426 init_const_call_expr_arg_iterator (t, &iter);
8427 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8429 /* If a function doesn't take a variable number of arguments,
8430 the last element in the list will have type `void'. */
8431 parmtype = TREE_VALUE (parmlist);
8432 if (VOID_TYPE_P (parmtype))
8434 if (more_const_call_expr_args_p (&iter))
8435 return END_BUILTINS;
8436 return DECL_FUNCTION_CODE (fndecl);
8439 if (! more_const_call_expr_args_p (&iter))
8440 return END_BUILTINS;
8442 arg = next_const_call_expr_arg (&iter);
8443 argtype = TREE_TYPE (arg);
8445 if (SCALAR_FLOAT_TYPE_P (parmtype))
8447 if (! SCALAR_FLOAT_TYPE_P (argtype))
8448 return END_BUILTINS;
8450 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8452 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8453 return END_BUILTINS;
8455 else if (POINTER_TYPE_P (parmtype))
8457 if (! POINTER_TYPE_P (argtype))
8458 return END_BUILTINS;
8460 else if (INTEGRAL_TYPE_P (parmtype))
8462 if (! INTEGRAL_TYPE_P (argtype))
8463 return END_BUILTINS;
8465 else
8466 return END_BUILTINS;
8469 /* Variable-length argument list. */
8470 return DECL_FUNCTION_CODE (fndecl);
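/* For example, for a well-formed call sqrt (x) with X a double this
   returns BUILT_IN_SQRT, while sqrt applied to a pointer fails the
   parameter/argument type walk above and yields END_BUILTINS.  */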
8473 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8474 evaluate to a constant. */
8476 static tree
8477 fold_builtin_constant_p (tree arg)
8479 /* We return 1 for a numeric type that's known to be a constant
8480 value at compile-time or for an aggregate type that's a
8481 literal constant. */
8482 STRIP_NOPS (arg);
8484 /* If we know this is a constant, return one. */
8485 if (CONSTANT_CLASS_P (arg)
8486 || (TREE_CODE (arg) == CONSTRUCTOR
8487 && TREE_CONSTANT (arg)))
8488 return integer_one_node;
8489 if (TREE_CODE (arg) == ADDR_EXPR)
8491 tree op = TREE_OPERAND (arg, 0);
8492 if (TREE_CODE (op) == STRING_CST
8493 || (TREE_CODE (op) == ARRAY_REF
8494 && integer_zerop (TREE_OPERAND (op, 1))
8495 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8496 return integer_one_node;
8499 /* If this expression has side effects, show we don't know it to be a
8500 constant. Likewise if it's a pointer or aggregate type, since in
8501 those cases we only want literals; those are only optimized
8502 when generating RTL, not later.
8503 And finally, if we are compiling an initializer, not code, we
8504 need to return a definite result now; there's not going to be any
8505 more optimization done. */
8506 if (TREE_SIDE_EFFECTS (arg)
8507 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8508 || POINTER_TYPE_P (TREE_TYPE (arg))
8509 || cfun == 0
8510 || folding_initializer
8511 || force_folding_builtin_constant_p)
8512 return integer_zero_node;
8514 return NULL_TREE;
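/* So, for example, __builtin_constant_p (3 * 4) folds to 1 here,
   __builtin_constant_p (p) with P a pointer folds to 0, and
   __builtin_constant_p (i) for a plain int variable stays unfolded
   (NULL_TREE) so that later passes may still prove it constant.  */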
8517 /* Create builtin_expect or builtin_expect_with_probability
8518 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8519 The Fortran FE can also produce builtin_expect with PREDICTOR as the
8520 third argument; builtin_expect_with_probability instead uses its third
8521 argument as the PROBABILITY value. */
8523 static tree
8524 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8525 tree predictor, tree probability)
8527 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8529 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8530 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8531 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8532 ret_type = TREE_TYPE (TREE_TYPE (fn));
8533 pred_type = TREE_VALUE (arg_types);
8534 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8536 pred = fold_convert_loc (loc, pred_type, pred);
8537 expected = fold_convert_loc (loc, expected_type, expected);
8539 if (probability)
8540 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8541 else
8542 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8543 predictor);
8545 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8546 build_int_cst (ret_type, 0));
8549 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8550 NULL_TREE if no simplification is possible. */
8552 tree
8553 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8554 tree arg3)
8556 tree inner, fndecl, inner_arg0;
8557 enum tree_code code;
8559 /* Distribute the expected value over short-circuiting operators.
8560 See through the cast from truthvalue_type_node to long. */
8561 inner_arg0 = arg0;
8562 while (CONVERT_EXPR_P (inner_arg0)
8563 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8564 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8565 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8567 /* If this is a builtin_expect within a builtin_expect keep the
8568 inner one. See through a comparison against a constant. It
8569 might have been added to create a truthvalue.
8570 inner = inner_arg0;
8572 if (COMPARISON_CLASS_P (inner)
8573 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8574 inner = TREE_OPERAND (inner, 0);
8576 if (TREE_CODE (inner) == CALL_EXPR
8577 && (fndecl = get_callee_fndecl (inner))
8578 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8579 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8580 return arg0;
8582 inner = inner_arg0;
8583 code = TREE_CODE (inner);
8584 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8586 tree op0 = TREE_OPERAND (inner, 0);
8587 tree op1 = TREE_OPERAND (inner, 1);
8588 arg1 = save_expr (arg1);
8590 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8591 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8592 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8594 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8597 /* If the argument isn't invariant then there's nothing else we can do. */
8598 if (!TREE_CONSTANT (inner_arg0))
8599 return NULL_TREE;
8601 /* If we expect that a comparison against the argument will fold to
8602 a constant, return the constant. In practice, this means a true
8603 constant or the address of a non-weak symbol. */
8604 inner = inner_arg0;
8605 STRIP_NOPS (inner);
8606 if (TREE_CODE (inner) == ADDR_EXPR)
8610 inner = TREE_OPERAND (inner, 0);
8612 while (TREE_CODE (inner) == COMPONENT_REF
8613 || TREE_CODE (inner) == ARRAY_REF);
8614 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8615 return NULL_TREE;
8618 /* Otherwise, ARG0 already has the proper type for the return value. */
8619 return arg0;
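/* For example,

     __builtin_expect (a && b, 1)

   is distributed by the code above into roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so each arm of the short-circuit carries the prediction.  */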
8622 /* Fold a call to __builtin_classify_type with argument ARG. */
8624 static tree
8625 fold_builtin_classify_type (tree arg)
8627 if (arg == 0)
8628 return build_int_cst (integer_type_node, no_type_class);
8630 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8633 /* Fold a call to __builtin_strlen with argument ARG. */
8635 static tree
8636 fold_builtin_strlen (location_t loc, tree type, tree arg)
8638 if (!validate_arg (arg, POINTER_TYPE))
8639 return NULL_TREE;
8640 else
8642 c_strlen_data lendata = { };
8643 tree len = c_strlen (arg, 0, &lendata);
8645 if (len)
8646 return fold_convert_loc (loc, type, len);
8648 if (!lendata.decl)
8649 c_strlen (arg, 1, &lendata);
8651 if (lendata.decl)
8653 if (EXPR_HAS_LOCATION (arg))
8654 loc = EXPR_LOCATION (arg);
8655 else if (loc == UNKNOWN_LOCATION)
8656 loc = input_location;
8657 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8660 return NULL_TREE;
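/* For example, strlen ("abc") folds to the constant 3 here, while
   strlen (s) with S a constant char array lacking a terminating nul
   is left unfolded and only triggers the warning above.  */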
8664 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8666 static tree
8667 fold_builtin_inf (location_t loc, tree type, int warn)
8669 REAL_VALUE_TYPE real;
8671 /* __builtin_inff is intended to be usable to define INFINITY on all
8672 targets. If an infinity is not available, INFINITY expands "to a
8673 positive constant of type float that overflows at translation
8674 time", footnote "In this case, using INFINITY will violate the
8675 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8676 Thus we pedwarn to ensure this constraint violation is
8677 diagnosed. */
8678 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8679 pedwarn (loc, 0, "target format does not support infinity");
8681 real_inf (&real);
8682 return build_real (type, real);
8685 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8686 NULL_TREE if no simplification can be made. */
8688 static tree
8689 fold_builtin_sincos (location_t loc,
8690 tree arg0, tree arg1, tree arg2)
8692 tree type;
8693 tree fndecl, call = NULL_TREE;
8695 if (!validate_arg (arg0, REAL_TYPE)
8696 || !validate_arg (arg1, POINTER_TYPE)
8697 || !validate_arg (arg2, POINTER_TYPE))
8698 return NULL_TREE;
8700 type = TREE_TYPE (arg0);
8702 /* Canonicalize sincos to cexpi. */
8703 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8704 if (fn == END_BUILTINS)
8705 return NULL_TREE;
8707 /* Calculate the result when the argument is a constant. */
8708 if (TREE_CODE (arg0) == REAL_CST)
8710 tree complex_type = build_complex_type (type);
8711 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8713 if (!call)
8715 if (!targetm.libc_has_function (function_c99_math_complex)
8716 || !builtin_decl_implicit_p (fn))
8717 return NULL_TREE;
8718 fndecl = builtin_decl_explicit (fn);
8719 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8720 call = builtin_save_expr (call);
8723 tree ptype = build_pointer_type (type);
8724 arg1 = fold_convert (ptype, arg1);
8725 arg2 = fold_convert (ptype, arg2);
8726 return build2 (COMPOUND_EXPR, void_type_node,
8727 build2 (MODIFY_EXPR, void_type_node,
8728 build_fold_indirect_ref_loc (loc, arg1),
8729 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8730 build2 (MODIFY_EXPR, void_type_node,
8731 build_fold_indirect_ref_loc (loc, arg2),
8732 fold_build1_loc (loc, REALPART_EXPR, type, call)));
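/* For example, sincos (x, &s, &c) is rewritten as if by

     _Complex double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   letting later passes share a single cexpi computation between the
   sine and cosine results.  */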
8735 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8736 Return NULL_TREE if no simplification can be made. */
8738 static tree
8739 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8741 if (!validate_arg (arg1, POINTER_TYPE)
8742 || !validate_arg (arg2, POINTER_TYPE)
8743 || !validate_arg (len, INTEGER_TYPE))
8744 return NULL_TREE;
8746 /* If the LEN parameter is zero, return zero. */
8747 if (integer_zerop (len))
8748 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8749 arg1, arg2);
8751 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8752 if (operand_equal_p (arg1, arg2, 0))
8753 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8755 /* If the LEN parameter is one, return an expression corresponding to
8756 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8757 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8759 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8760 tree cst_uchar_ptr_node
8761 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8763 tree ind1
8764 = fold_convert_loc (loc, integer_type_node,
8765 build1 (INDIRECT_REF, cst_uchar_node,
8766 fold_convert_loc (loc,
8767 cst_uchar_ptr_node,
8768 arg1)));
8769 tree ind2
8770 = fold_convert_loc (loc, integer_type_node,
8771 build1 (INDIRECT_REF, cst_uchar_node,
8772 fold_convert_loc (loc,
8773 cst_uchar_ptr_node,
8774 arg2)));
8775 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8778 return NULL_TREE;
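/* For example, memcmp (p, q, 0) folds to 0, memcmp (p, p, n) folds
   to 0, and memcmp (p, q, 1) becomes

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   as built by the code above.  */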
8781 /* Fold a call to builtin isascii with argument ARG. */
8783 static tree
8784 fold_builtin_isascii (location_t loc, tree arg)
8786 if (!validate_arg (arg, INTEGER_TYPE))
8787 return NULL_TREE;
8788 else
8790 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8791 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8792 build_int_cst (integer_type_node,
8793 ~ (unsigned HOST_WIDE_INT) 0x7f));
8794 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8795 arg, integer_zero_node);
8799 /* Fold a call to builtin toascii with argument ARG. */
8801 static tree
8802 fold_builtin_toascii (location_t loc, tree arg)
8804 if (!validate_arg (arg, INTEGER_TYPE))
8805 return NULL_TREE;
8807 /* Transform toascii(c) -> (c & 0x7f). */
8808 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8809 build_int_cst (integer_type_node, 0x7f));
8812 /* Fold a call to builtin isdigit with argument ARG. */
8814 static tree
8815 fold_builtin_isdigit (location_t loc, tree arg)
8817 if (!validate_arg (arg, INTEGER_TYPE))
8818 return NULL_TREE;
8819 else
8821 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8822 /* According to the C standard, isdigit is unaffected by locale.
8823 However, it definitely is affected by the target character set. */
8824 unsigned HOST_WIDE_INT target_digit0
8825 = lang_hooks.to_target_charset ('0');
8827 if (target_digit0 == 0)
8828 return NULL_TREE;
8830 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8831 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8832 build_int_cst (unsigned_type_node, target_digit0));
8833 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8834 build_int_cst (unsigned_type_node, 9));
8838 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8840 static tree
8841 fold_builtin_fabs (location_t loc, tree arg, tree type)
8843 if (!validate_arg (arg, REAL_TYPE))
8844 return NULL_TREE;
8846 arg = fold_convert_loc (loc, type, arg);
8847 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8850 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8852 static tree
8853 fold_builtin_abs (location_t loc, tree arg, tree type)
8855 if (!validate_arg (arg, INTEGER_TYPE))
8856 return NULL_TREE;
8858 arg = fold_convert_loc (loc, type, arg);
8859 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8862 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8864 static tree
8865 fold_builtin_carg (location_t loc, tree arg, tree type)
8867 if (validate_arg (arg, COMPLEX_TYPE)
8868 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8870 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8872 if (atan2_fn)
8874 tree new_arg = builtin_save_expr (arg);
8875 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8876 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8877 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8881 return NULL_TREE;
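/* Illustrative sketch, not compiler code: the fold above implements the
   identity carg (a + b*i) == atan2 (b, a), with the argument saved so
   it is evaluated only once.  */

static inline double
carg_sketch (_Complex double z)
{
  return __builtin_atan2 (__builtin_cimag (z), __builtin_creal (z));
}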
8884 /* Fold a call to builtin frexp; we can assume the base is 2. */
8886 static tree
8887 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8889 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8890 return NULL_TREE;
8892 STRIP_NOPS (arg0);
8894 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8895 return NULL_TREE;
8897 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8899 /* Proceed if a valid pointer type was passed in. */
8900 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8902 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8903 tree frac, exp;
8905 switch (value->cl)
8907 case rvc_zero:
8908 /* For +-0, return (*exp = 0, +-0). */
8909 exp = integer_zero_node;
8910 frac = arg0;
8911 break;
8912 case rvc_nan:
8913 case rvc_inf:
8914 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8915 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8916 case rvc_normal:
8918 /* Since the frexp function always expects base 2, and in
8919 GCC normalized significands are already in the range
8920 [0.5, 1.0), we have exactly what frexp wants. */
8921 REAL_VALUE_TYPE frac_rvt = *value;
8922 SET_REAL_EXP (&frac_rvt, 0);
8923 frac = build_real (rettype, frac_rvt);
8924 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8926 break;
8927 default:
8928 gcc_unreachable ();
8931 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8932 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8933 TREE_SIDE_EFFECTS (arg1) = 1;
8934 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8937 return NULL_TREE;
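/* Worked example of the constant fold above (illustrative only): 8.0 is
   0.5 * 2**4, and GCC keeps significands normalized to [0.5, 1.0), so
   the fold only has to copy out the stored exponent.  */

static inline double
frexp_example (int *e)
{
  /* Folds to the pair (*e = 4, 0.5).  */
  return __builtin_frexp (8.0, e);
}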
8940 /* Fold a call to builtin modf. */
8942 static tree
8943 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8945 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8946 return NULL_TREE;
8948 STRIP_NOPS (arg0);
8950 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8951 return NULL_TREE;
8953 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8955 /* Proceed if a valid pointer type was passed in. */
8956 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8958 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8959 REAL_VALUE_TYPE trunc, frac;
8961 switch (value->cl)
8963 case rvc_nan:
8964 case rvc_zero:
8965 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8966 trunc = frac = *value;
8967 break;
8968 case rvc_inf:
8969 /* For +-Inf, return (*arg1 = arg0, +-0). */
8970 frac = dconst0;
8971 frac.sign = value->sign;
8972 trunc = *value;
8973 break;
8974 case rvc_normal:
8975 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8976 real_trunc (&trunc, VOIDmode, value);
8977 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8978 /* If the original number was negative and already
8979 integral, then the fractional part is -0.0. */
8980 if (value->sign && frac.cl == rvc_zero)
8981 frac.sign = value->sign;
8982 break;
8985 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8986 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8987 build_real (rettype, trunc));
8988 TREE_SIDE_EFFECTS (arg1) = 1;
8989 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8990 build_real (rettype, frac));
8993 return NULL_TREE;
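/* Worked example of the constant fold above (illustrative only):
   modf (-3.5, &ip) folds to (*ip = -3.0, -0.5), and for a value that is
   negative and already integral the fractional part folds to -0.0.  */

static inline double
modf_example (double *ip)
{
  /* Folds to (*ip = -3.0, -0.5).  */
  return __builtin_modf (-3.5, ip);
}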
8996 /* Given a location LOC, an interclass builtin function decl FNDECL
8997 and its single argument ARG, return a folded expression computing
8998 the same value, or NULL_TREE if we either couldn't or didn't want to
8999 fold (the latter happens if there's an RTL instruction available). */
9001 static tree
9002 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9004 machine_mode mode;
9006 if (!validate_arg (arg, REAL_TYPE))
9007 return NULL_TREE;
9009 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9010 return NULL_TREE;
9012 mode = TYPE_MODE (TREE_TYPE (arg));
9014 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9016 /* If there is no optab, try generic code. */
9017 switch (DECL_FUNCTION_CODE (fndecl))
9019 tree result;
9021 CASE_FLT_FN (BUILT_IN_ISINF):
9023 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9024 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9025 tree type = TREE_TYPE (arg);
9026 REAL_VALUE_TYPE r;
9027 char buf[128];
9029 if (is_ibm_extended)
9031 /* NaN and Inf are encoded in the high-order double value
9032 only. The low-order value is not significant. */
9033 type = double_type_node;
9034 mode = DFmode;
9035 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9037 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9038 real_from_string (&r, buf);
9039 result = build_call_expr (isgr_fn, 2,
9040 fold_build1_loc (loc, ABS_EXPR, type, arg),
9041 build_real (type, r));
9042 return result;
9044 CASE_FLT_FN (BUILT_IN_FINITE):
9045 case BUILT_IN_ISFINITE:
9047 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9048 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9049 tree type = TREE_TYPE (arg);
9050 REAL_VALUE_TYPE r;
9051 char buf[128];
9053 if (is_ibm_extended)
9055 /* NaN and Inf are encoded in the high-order double value
9056 only. The low-order value is not significant. */
9057 type = double_type_node;
9058 mode = DFmode;
9059 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9061 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9062 real_from_string (&r, buf);
9063 result = build_call_expr (isle_fn, 2,
9064 fold_build1_loc (loc, ABS_EXPR, type, arg),
9065 build_real (type, r));
9066 /*result = fold_build2_loc (loc, UNGT_EXPR,
9067 TREE_TYPE (TREE_TYPE (fndecl)),
9068 fold_build1_loc (loc, ABS_EXPR, type, arg),
9069 build_real (type, r));
9070 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9071 TREE_TYPE (TREE_TYPE (fndecl)),
9072 result);*/
9073 return result;
9075 case BUILT_IN_ISNORMAL:
9077 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9078 islessequal(fabs(x),DBL_MAX). */
9079 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9080 tree type = TREE_TYPE (arg);
9081 tree orig_arg, max_exp, min_exp;
9082 machine_mode orig_mode = mode;
9083 REAL_VALUE_TYPE rmax, rmin;
9084 char buf[128];
9086 orig_arg = arg = builtin_save_expr (arg);
9087 if (is_ibm_extended)
9089 /* Use double to test the normal range of IBM extended
9090 precision. Emin for IBM extended precision is
9091 different to emin for IEEE double, being 53 higher
9092 since the low double exponent is at least 53 lower
9093 than the high double exponent. */
9094 type = double_type_node;
9095 mode = DFmode;
9096 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9098 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9100 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9101 real_from_string (&rmax, buf);
9102 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9103 real_from_string (&rmin, buf);
9104 max_exp = build_real (type, rmax);
9105 min_exp = build_real (type, rmin);
9107 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9108 if (is_ibm_extended)
9110 /* Testing the high end of the range is done just using
9111 the high double, using the same test as isfinite().
9112 For the subnormal end of the range we first test the
9113 high double, then if its magnitude is equal to the
9114 limit of 0x1p-969, we test whether the low double is
9115 non-zero and opposite sign to the high double. */
9116 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9117 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9118 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9119 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9120 arg, min_exp);
9121 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9122 complex_double_type_node, orig_arg);
9123 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9124 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9125 tree zero = build_real (type, dconst0);
9126 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9127 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9128 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9129 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9130 fold_build3 (COND_EXPR,
9131 integer_type_node,
9132 hilt, logt, lolt));
9133 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9134 eq_min, ok_lo);
9135 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9136 gt_min, eq_min);
9138 else
9140 tree const isge_fn
9141 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9142 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9144 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9145 max_exp, min_exp);
9146 return result;
9148 default:
9149 break;
9152 return NULL_TREE;
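/* Illustrative sketch, not compiler code: for a plain IEEE double the
   generic expansions above amount to comparisons against DBL_MAX and
   DBL_MIN (the values that get_max_float and the "0x1p%d" buffer
   produce), using the quiet isgreater/isless* builtins.  */

static inline int
isinf_sketch (double x)
{
  return __builtin_isgreater (__builtin_fabs (x),
			      1.7976931348623157e+308 /* DBL_MAX */);
}

static inline int
isnormal_sketch (double x)
{
  return __builtin_isgreaterequal (__builtin_fabs (x),
				   2.2250738585072014e-308 /* DBL_MIN */)
	 & __builtin_islessequal (__builtin_fabs (x),
				  1.7976931348623157e+308 /* DBL_MAX */);
}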
9155 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9156 ARG is the argument for the call. */
9158 static tree
9159 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9161 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9163 if (!validate_arg (arg, REAL_TYPE))
9164 return NULL_TREE;
9166 switch (builtin_index)
9168 case BUILT_IN_ISINF:
9169 if (!HONOR_INFINITIES (arg))
9170 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9172 return NULL_TREE;
9174 case BUILT_IN_ISINF_SIGN:
9176 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9177 /* In a boolean context, GCC will fold the inner COND_EXPR to
9178 1. So e.g. "if (isinf_sign(x))" would be folded to just
9179 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9180 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9181 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9182 tree tmp = NULL_TREE;
9184 arg = builtin_save_expr (arg);
9186 if (signbit_fn && isinf_fn)
9188 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9189 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9191 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9192 signbit_call, integer_zero_node);
9193 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9194 isinf_call, integer_zero_node);
9196 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9197 integer_minus_one_node, integer_one_node);
9198 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9199 isinf_call, tmp,
9200 integer_zero_node);
9203 return tmp;
9206 case BUILT_IN_ISFINITE:
9207 if (!HONOR_NANS (arg)
9208 && !HONOR_INFINITIES (arg))
9209 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9211 return NULL_TREE;
9213 case BUILT_IN_ISNAN:
9214 if (!HONOR_NANS (arg))
9215 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9218 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9219 if (is_ibm_extended)
9221 /* NaN and Inf are encoded in the high-order double value
9222 only. The low-order value is not significant. */
9223 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9226 arg = builtin_save_expr (arg);
9227 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9229 default:
9230 gcc_unreachable ();
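/* Illustrative sketches, not compiler code, of the folds above.  */

static inline int
isinf_sign_sketch (double x)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0.  */
  return __builtin_isinf (x) ? (__builtin_signbit (x) ? -1 : 1) : 0;
}

static inline int
isnan_sketch (double x)
{
  /* The UNORDERED_EXPR (arg, arg) built above: true iff x is NaN.  */
  return __builtin_isunordered (x, x);
}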
9234 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9235 This builtin will generate code to return the appropriate floating
9236 point classification depending on the value of the floating point
9237 number passed in. The possible return values must be supplied as
9238 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9239 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9240 one floating point argument which is "type generic". */
9242 static tree
9243 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9245 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9246 arg, type, res, tmp;
9247 machine_mode mode;
9248 REAL_VALUE_TYPE r;
9249 char buf[128];
9251 /* Verify the required arguments in the original call. */
9252 if (nargs != 6
9253 || !validate_arg (args[0], INTEGER_TYPE)
9254 || !validate_arg (args[1], INTEGER_TYPE)
9255 || !validate_arg (args[2], INTEGER_TYPE)
9256 || !validate_arg (args[3], INTEGER_TYPE)
9257 || !validate_arg (args[4], INTEGER_TYPE)
9258 || !validate_arg (args[5], REAL_TYPE))
9259 return NULL_TREE;
9261 fp_nan = args[0];
9262 fp_infinite = args[1];
9263 fp_normal = args[2];
9264 fp_subnormal = args[3];
9265 fp_zero = args[4];
9266 arg = args[5];
9267 type = TREE_TYPE (arg);
9268 mode = TYPE_MODE (type);
9269 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9271 /* fpclassify(x) ->
9272 isnan(x) ? FP_NAN :
9273 (fabs(x) == Inf ? FP_INFINITE :
9274 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9275 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9277 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9278 build_real (type, dconst0));
9279 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9280 tmp, fp_zero, fp_subnormal);
9282 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9283 real_from_string (&r, buf);
9284 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9285 arg, build_real (type, r));
9286 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9288 if (HONOR_INFINITIES (mode))
9290 real_inf (&r);
9291 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9292 build_real (type, r));
9293 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9294 fp_infinite, res);
9297 if (HONOR_NANS (mode))
9299 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9300 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9303 return res;
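/* Illustrative sketch, not compiler code: for IEEE double, where the
   "0x1p%d" buffer yields DBL_MIN = 0x1p-1022, the chain of COND_EXPRs
   built above behaves like this, with the five int arguments passed
   through unchanged.  */

static inline int
fpclassify_sketch (double x, int fp_nan, int fp_infinite, int fp_normal,
		   int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  if (__builtin_isunordered (x, x))
    return fp_nan;
  if (ax == __builtin_inf ())
    return fp_infinite;
  if (ax >= 0x1p-1022)
    return fp_normal;
  return ax == 0.0 ? fp_zero : fp_subnormal;
}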
9306 /* Fold a call to an unordered comparison function such as
9307 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9308 being called and ARG0 and ARG1 are the arguments for the call.
9309 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9310 the opposite of the desired result. UNORDERED_CODE is used
9311 for modes that can hold NaNs and ORDERED_CODE is used for
9312 the rest. */
9314 static tree
9315 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9316 enum tree_code unordered_code,
9317 enum tree_code ordered_code)
9319 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9320 enum tree_code code;
9321 tree type0, type1;
9322 enum tree_code code0, code1;
9323 tree cmp_type = NULL_TREE;
9325 type0 = TREE_TYPE (arg0);
9326 type1 = TREE_TYPE (arg1);
9328 code0 = TREE_CODE (type0);
9329 code1 = TREE_CODE (type1);
9331 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9332 /* Choose the wider of two real types. */
9333 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9334 ? type0 : type1;
9335 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9336 cmp_type = type0;
9337 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9338 cmp_type = type1;
9340 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9341 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9343 if (unordered_code == UNORDERED_EXPR)
9345 if (!HONOR_NANS (arg0))
9346 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9347 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9350 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9351 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9352 fold_build2_loc (loc, code, type, arg0, arg1));
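/* Usage sketch (illustrative only): when the operands can hold NaNs,
   the fold above turns isgreater into the negation of a non-trapping
   UNLE_EXPR, so the comparison stays quiet.  */

static inline int
isgreater_example (double x, double y)
{
  /* True iff x > y; false, without raising FE_INVALID, for NaNs.  */
  return __builtin_isgreater (x, y);
}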
9355 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9356 arithmetic if it can never overflow, or into internal functions that
9357 return both the result of the arithmetic and an overflow flag in
9358 a complex integer result, or into some other check for overflow.
9359 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9360 checking part of that. */
9362 static tree
9363 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9364 tree arg0, tree arg1, tree arg2)
9366 enum internal_fn ifn = IFN_LAST;
9367 /* The code of the expression corresponding to the built-in. */
9368 enum tree_code opcode = ERROR_MARK;
9369 bool ovf_only = false;
9371 switch (fcode)
9373 case BUILT_IN_ADD_OVERFLOW_P:
9374 ovf_only = true;
9375 /* FALLTHRU */
9376 case BUILT_IN_ADD_OVERFLOW:
9377 case BUILT_IN_SADD_OVERFLOW:
9378 case BUILT_IN_SADDL_OVERFLOW:
9379 case BUILT_IN_SADDLL_OVERFLOW:
9380 case BUILT_IN_UADD_OVERFLOW:
9381 case BUILT_IN_UADDL_OVERFLOW:
9382 case BUILT_IN_UADDLL_OVERFLOW:
9383 opcode = PLUS_EXPR;
9384 ifn = IFN_ADD_OVERFLOW;
9385 break;
9386 case BUILT_IN_SUB_OVERFLOW_P:
9387 ovf_only = true;
9388 /* FALLTHRU */
9389 case BUILT_IN_SUB_OVERFLOW:
9390 case BUILT_IN_SSUB_OVERFLOW:
9391 case BUILT_IN_SSUBL_OVERFLOW:
9392 case BUILT_IN_SSUBLL_OVERFLOW:
9393 case BUILT_IN_USUB_OVERFLOW:
9394 case BUILT_IN_USUBL_OVERFLOW:
9395 case BUILT_IN_USUBLL_OVERFLOW:
9396 opcode = MINUS_EXPR;
9397 ifn = IFN_SUB_OVERFLOW;
9398 break;
9399 case BUILT_IN_MUL_OVERFLOW_P:
9400 ovf_only = true;
9401 /* FALLTHRU */
9402 case BUILT_IN_MUL_OVERFLOW:
9403 case BUILT_IN_SMUL_OVERFLOW:
9404 case BUILT_IN_SMULL_OVERFLOW:
9405 case BUILT_IN_SMULLL_OVERFLOW:
9406 case BUILT_IN_UMUL_OVERFLOW:
9407 case BUILT_IN_UMULL_OVERFLOW:
9408 case BUILT_IN_UMULLL_OVERFLOW:
9409 opcode = MULT_EXPR;
9410 ifn = IFN_MUL_OVERFLOW;
9411 break;
9412 default:
9413 gcc_unreachable ();
9416 /* For the "generic" overloads, the first two arguments can have different
9417 types and the last argument determines the target type to use to check
9418 for overflow. The arguments of the other overloads all have the same
9419 type. */
9420 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9422 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9423 arguments are constant, attempt to fold the built-in call into a constant
9424 expression indicating whether or not it detected an overflow. */
9425 if (ovf_only
9426 && TREE_CODE (arg0) == INTEGER_CST
9427 && TREE_CODE (arg1) == INTEGER_CST)
9428 /* Perform the computation in the target type and check for overflow. */
9429 return omit_one_operand_loc (loc, boolean_type_node,
9430 arith_overflowed_p (opcode, type, arg0, arg1)
9431 ? boolean_true_node : boolean_false_node,
9432 arg2);
9434 tree intres, ovfres;
9435 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9437 intres = fold_binary_loc (loc, opcode, type,
9438 fold_convert_loc (loc, type, arg0),
9439 fold_convert_loc (loc, type, arg1));
9440 if (TREE_OVERFLOW (intres))
9441 intres = drop_tree_overflow (intres);
9442 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9443 ? boolean_true_node : boolean_false_node);
9445 else
9447 tree ctype = build_complex_type (type);
9448 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9449 arg0, arg1);
9450 tree tgt = save_expr (call);
9451 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9452 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9453 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9456 if (ovf_only)
9457 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9459 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9460 tree store
9461 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9462 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
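/* Usage sketch (illustrative only): with constant operands the _p forms
   fold to a boolean constant here, e.g. __builtin_add_overflow_p
   (INT_MAX, 1, (int) 0) folds to true; otherwise the call becomes the
   REALPART/IMAGPART pair of an IFN_*_OVERFLOW internal call.  */

static inline int
add_overflow_example (int a, int b, int *sum)
{
  /* Returns true iff a + b overflows int; *sum gets the wrapped
     result.  */
  return __builtin_add_overflow (a, b, sum);
}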
9465 /* Fold a call to __builtin_FILE to a constant string. */
9467 static inline tree
9468 fold_builtin_FILE (location_t loc)
9470 if (const char *fname = LOCATION_FILE (loc))
9472 /* The documentation says this builtin is equivalent to the preprocessor
9473 __FILE__ macro so it appears appropriate to use the same file prefix
9474 mappings. */
9475 fname = remap_macro_filename (fname);
9476 return build_string_literal (strlen (fname) + 1, fname);
9479 return build_string_literal (1, "");
9482 /* Fold a call to __builtin_FUNCTION to a constant string. */
9484 static inline tree
9485 fold_builtin_FUNCTION ()
9487 const char *name = "";
9489 if (current_function_decl)
9490 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9492 return build_string_literal (strlen (name) + 1, name);
9495 /* Fold a call to __builtin_LINE to an integer constant. */
9497 static inline tree
9498 fold_builtin_LINE (location_t loc, tree type)
9500 return build_int_cst (type, LOCATION_LINE (loc));
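/* Usage sketch (illustrative only): each of these folds to a constant
   for the location of the call, which, unlike the __FILE__ and __LINE__
   macros, also works through default arguments in C++.  */

static inline void
location_example (void)
{
  const char *file = __builtin_FILE ();	      /* this file's name */
  const char *func = __builtin_FUNCTION (); /* "location_example" */
  int line = __builtin_LINE ();	      /* this line's number */
  (void) file; (void) func; (void) line;
}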
9503 /* Fold a call to built-in function FNDECL with 0 arguments.
9504 This function returns NULL_TREE if no simplification was possible. */
9506 static tree
9507 fold_builtin_0 (location_t loc, tree fndecl)
9509 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9510 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9511 switch (fcode)
9513 case BUILT_IN_FILE:
9514 return fold_builtin_FILE (loc);
9516 case BUILT_IN_FUNCTION:
9517 return fold_builtin_FUNCTION ();
9519 case BUILT_IN_LINE:
9520 return fold_builtin_LINE (loc, type);
9522 CASE_FLT_FN (BUILT_IN_INF):
9523 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9524 case BUILT_IN_INFD32:
9525 case BUILT_IN_INFD64:
9526 case BUILT_IN_INFD128:
9527 return fold_builtin_inf (loc, type, true);
9529 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9530 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9531 return fold_builtin_inf (loc, type, false);
9533 case BUILT_IN_CLASSIFY_TYPE:
9534 return fold_builtin_classify_type (NULL_TREE);
9536 default:
9537 break;
9539 return NULL_TREE;
9542 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9543 This function returns NULL_TREE if no simplification was possible. */
9545 static tree
9546 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9548 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9549 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9551 if (TREE_CODE (arg0) == ERROR_MARK)
9552 return NULL_TREE;
9554 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9555 return ret;
9557 switch (fcode)
9559 case BUILT_IN_CONSTANT_P:
9561 tree val = fold_builtin_constant_p (arg0);
9563 /* Gimplification will pull the CALL_EXPR for the builtin out of
9564 an if condition. When not optimizing, we'll not CSE it back.
9565 To avoid link-error regressions, return false now. */
9566 if (!val && !optimize)
9567 val = integer_zero_node;
9569 return val;
9572 case BUILT_IN_CLASSIFY_TYPE:
9573 return fold_builtin_classify_type (arg0);
9575 case BUILT_IN_STRLEN:
9576 return fold_builtin_strlen (loc, type, arg0);
9578 CASE_FLT_FN (BUILT_IN_FABS):
9579 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9580 case BUILT_IN_FABSD32:
9581 case BUILT_IN_FABSD64:
9582 case BUILT_IN_FABSD128:
9583 return fold_builtin_fabs (loc, arg0, type);
9585 case BUILT_IN_ABS:
9586 case BUILT_IN_LABS:
9587 case BUILT_IN_LLABS:
9588 case BUILT_IN_IMAXABS:
9589 return fold_builtin_abs (loc, arg0, type);
9591 CASE_FLT_FN (BUILT_IN_CONJ):
9592 if (validate_arg (arg0, COMPLEX_TYPE)
9593 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9594 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9595 break;
9597 CASE_FLT_FN (BUILT_IN_CREAL):
9598 if (validate_arg (arg0, COMPLEX_TYPE)
9599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9600 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9601 break;
9603 CASE_FLT_FN (BUILT_IN_CIMAG):
9604 if (validate_arg (arg0, COMPLEX_TYPE)
9605 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9606 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9607 break;
9609 CASE_FLT_FN (BUILT_IN_CARG):
9610 return fold_builtin_carg (loc, arg0, type);
9612 case BUILT_IN_ISASCII:
9613 return fold_builtin_isascii (loc, arg0);
9615 case BUILT_IN_TOASCII:
9616 return fold_builtin_toascii (loc, arg0);
9618 case BUILT_IN_ISDIGIT:
9619 return fold_builtin_isdigit (loc, arg0);
9621 CASE_FLT_FN (BUILT_IN_FINITE):
9622 case BUILT_IN_FINITED32:
9623 case BUILT_IN_FINITED64:
9624 case BUILT_IN_FINITED128:
9625 case BUILT_IN_ISFINITE:
9627 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9628 if (ret)
9629 return ret;
9630 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9633 CASE_FLT_FN (BUILT_IN_ISINF):
9634 case BUILT_IN_ISINFD32:
9635 case BUILT_IN_ISINFD64:
9636 case BUILT_IN_ISINFD128:
9638 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9639 if (ret)
9640 return ret;
9641 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9644 case BUILT_IN_ISNORMAL:
9645 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9647 case BUILT_IN_ISINF_SIGN:
9648 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9650 CASE_FLT_FN (BUILT_IN_ISNAN):
9651 case BUILT_IN_ISNAND32:
9652 case BUILT_IN_ISNAND64:
9653 case BUILT_IN_ISNAND128:
9654 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9656 case BUILT_IN_FREE:
9657 if (integer_zerop (arg0))
9658 return build_empty_stmt (loc);
9659 break;
9661 default:
9662 break;
9665 return NULL_TREE;
9669 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9670 This function returns NULL_TREE if no simplification was possible. */
9672 static tree
9673 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9675 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9676 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9678 if (TREE_CODE (arg0) == ERROR_MARK
9679 || TREE_CODE (arg1) == ERROR_MARK)
9680 return NULL_TREE;
9682 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9683 return ret;
9685 switch (fcode)
9687 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9688 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9689 if (validate_arg (arg0, REAL_TYPE)
9690 && validate_arg (arg1, POINTER_TYPE))
9691 return do_mpfr_lgamma_r (arg0, arg1, type);
9692 break;
9694 CASE_FLT_FN (BUILT_IN_FREXP):
9695 return fold_builtin_frexp (loc, arg0, arg1, type);
9697 CASE_FLT_FN (BUILT_IN_MODF):
9698 return fold_builtin_modf (loc, arg0, arg1, type);
9700 case BUILT_IN_STRSPN:
9701 return fold_builtin_strspn (loc, arg0, arg1);
9703 case BUILT_IN_STRCSPN:
9704 return fold_builtin_strcspn (loc, arg0, arg1);
9706 case BUILT_IN_STRPBRK:
9707 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9709 case BUILT_IN_EXPECT:
9710 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9712 case BUILT_IN_ISGREATER:
9713 return fold_builtin_unordered_cmp (loc, fndecl,
9714 arg0, arg1, UNLE_EXPR, LE_EXPR);
9715 case BUILT_IN_ISGREATEREQUAL:
9716 return fold_builtin_unordered_cmp (loc, fndecl,
9717 arg0, arg1, UNLT_EXPR, LT_EXPR);
9718 case BUILT_IN_ISLESS:
9719 return fold_builtin_unordered_cmp (loc, fndecl,
9720 arg0, arg1, UNGE_EXPR, GE_EXPR);
9721 case BUILT_IN_ISLESSEQUAL:
9722 return fold_builtin_unordered_cmp (loc, fndecl,
9723 arg0, arg1, UNGT_EXPR, GT_EXPR);
9724 case BUILT_IN_ISLESSGREATER:
9725 return fold_builtin_unordered_cmp (loc, fndecl,
9726 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9727 case BUILT_IN_ISUNORDERED:
9728 return fold_builtin_unordered_cmp (loc, fndecl,
9729 arg0, arg1, UNORDERED_EXPR,
9730 NOP_EXPR);
9732 /* We do the folding for va_start in the expander. */
9733 case BUILT_IN_VA_START:
9734 break;
9736 case BUILT_IN_OBJECT_SIZE:
9737 return fold_builtin_object_size (arg0, arg1);
9739 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9740 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9742 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9743 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9745 default:
9746 break;
9748 return NULL_TREE;
9751 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9752 and ARG2.
9753 This function returns NULL_TREE if no simplification was possible. */
9755 static tree
9756 fold_builtin_3 (location_t loc, tree fndecl,
9757 tree arg0, tree arg1, tree arg2)
9759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9760 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9762 if (TREE_CODE (arg0) == ERROR_MARK
9763 || TREE_CODE (arg1) == ERROR_MARK
9764 || TREE_CODE (arg2) == ERROR_MARK)
9765 return NULL_TREE;
9767 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9768 arg0, arg1, arg2))
9769 return ret;
9771 switch (fcode)
9774 CASE_FLT_FN (BUILT_IN_SINCOS):
9775 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9777 CASE_FLT_FN (BUILT_IN_REMQUO):
9778 if (validate_arg (arg0, REAL_TYPE)
9779 && validate_arg (arg1, REAL_TYPE)
9780 && validate_arg (arg2, POINTER_TYPE))
9781 return do_mpfr_remquo (arg0, arg1, arg2);
9782 break;
9784 case BUILT_IN_MEMCMP:
9785 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9787 case BUILT_IN_EXPECT:
9788 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9790 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9791 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9793 case BUILT_IN_ADD_OVERFLOW:
9794 case BUILT_IN_SUB_OVERFLOW:
9795 case BUILT_IN_MUL_OVERFLOW:
9796 case BUILT_IN_ADD_OVERFLOW_P:
9797 case BUILT_IN_SUB_OVERFLOW_P:
9798 case BUILT_IN_MUL_OVERFLOW_P:
9799 case BUILT_IN_SADD_OVERFLOW:
9800 case BUILT_IN_SADDL_OVERFLOW:
9801 case BUILT_IN_SADDLL_OVERFLOW:
9802 case BUILT_IN_SSUB_OVERFLOW:
9803 case BUILT_IN_SSUBL_OVERFLOW:
9804 case BUILT_IN_SSUBLL_OVERFLOW:
9805 case BUILT_IN_SMUL_OVERFLOW:
9806 case BUILT_IN_SMULL_OVERFLOW:
9807 case BUILT_IN_SMULLL_OVERFLOW:
9808 case BUILT_IN_UADD_OVERFLOW:
9809 case BUILT_IN_UADDL_OVERFLOW:
9810 case BUILT_IN_UADDLL_OVERFLOW:
9811 case BUILT_IN_USUB_OVERFLOW:
9812 case BUILT_IN_USUBL_OVERFLOW:
9813 case BUILT_IN_USUBLL_OVERFLOW:
9814 case BUILT_IN_UMUL_OVERFLOW:
9815 case BUILT_IN_UMULL_OVERFLOW:
9816 case BUILT_IN_UMULLL_OVERFLOW:
9817 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9819 default:
9820 break;
9822 return NULL_TREE;
9825 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9826 arguments. IGNORE is true if the result of the
9827 function call is ignored. This function returns NULL_TREE if no
9828 simplification was possible. */
9830 tree
9831 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9833 tree ret = NULL_TREE;
9835 switch (nargs)
9837 case 0:
9838 ret = fold_builtin_0 (loc, fndecl);
9839 break;
9840 case 1:
9841 ret = fold_builtin_1 (loc, fndecl, args[0]);
9842 break;
9843 case 2:
9844 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9845 break;
9846 case 3:
9847 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9848 break;
9849 default:
9850 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9851 break;
9853 if (ret)
9855 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9856 SET_EXPR_LOCATION (ret, loc);
9857 return ret;
9859 return NULL_TREE;
9862 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9863 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9864 of arguments in ARGS to be omitted. OLDNARGS is the number of
9865 elements in ARGS. */
9867 static tree
9868 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9869 int skip, tree fndecl, int n, va_list newargs)
9871 int nargs = oldnargs - skip + n;
9872 tree *buffer;
9874 if (n > 0)
9876 int i, j;
9878 buffer = XALLOCAVEC (tree, nargs);
9879 for (i = 0; i < n; i++)
9880 buffer[i] = va_arg (newargs, tree);
9881 for (j = skip; j < oldnargs; j++, i++)
9882 buffer[i] = args[j];
9884 else
9885 buffer = args + skip;
9887 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9890 /* Return true if FNDECL shouldn't be folded right now.
9891 If a built-in function has an inline attribute always_inline
9892 wrapper, defer folding it after always_inline functions have
9893 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9894 might not be performed. */
9896 bool
9897 avoid_folding_inline_builtin (tree fndecl)
9899 return (DECL_DECLARED_INLINE_P (fndecl)
9900 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9901 && cfun
9902 && !cfun->always_inline_functions_inlined
9903 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9906 /* A wrapper function for builtin folding that prevents warnings for
9907 "statement without effect" and the like, caused by removing the
9908 call node earlier than the warning is generated. */
9910 tree
9911 fold_call_expr (location_t loc, tree exp, bool ignore)
9913 tree ret = NULL_TREE;
9914 tree fndecl = get_callee_fndecl (exp);
9915 if (fndecl && fndecl_built_in_p (fndecl)
9916 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9917 yet. Defer folding until we see all the arguments
9918 (after inlining). */
9919 && !CALL_EXPR_VA_ARG_PACK (exp))
9921 int nargs = call_expr_nargs (exp);
9923 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9924 instead the last argument is __builtin_va_arg_pack (). Defer folding
9925 even in that case, until arguments are finalized. */
9926 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9928 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9929 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9930 return NULL_TREE;
9933 if (avoid_folding_inline_builtin (fndecl))
9934 return NULL_TREE;
9936 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9937 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9938 CALL_EXPR_ARGP (exp), ignore);
9939 else
9941 tree *args = CALL_EXPR_ARGP (exp);
9942 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9943 if (ret)
9944 return ret;
9947 return NULL_TREE;
9950 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9951 N arguments are passed in the array ARGARRAY. Return a folded
9952 expression or NULL_TREE if no simplification was possible. */
9954 tree
9955 fold_builtin_call_array (location_t loc, tree,
9956 tree fn,
9957 int n,
9958 tree *argarray)
9960 if (TREE_CODE (fn) != ADDR_EXPR)
9961 return NULL_TREE;
9963 tree fndecl = TREE_OPERAND (fn, 0);
9964 if (TREE_CODE (fndecl) == FUNCTION_DECL
9965 && fndecl_built_in_p (fndecl))
9967 /* If last argument is __builtin_va_arg_pack (), arguments to this
9968 function are not finalized yet. Defer folding until they are. */
9969 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9971 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9972 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9973 return NULL_TREE;
9975 if (avoid_folding_inline_builtin (fndecl))
9976 return NULL_TREE;
9977 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9978 return targetm.fold_builtin (fndecl, n, argarray, false);
9979 else
9980 return fold_builtin_n (loc, fndecl, argarray, n, false);
9983 return NULL_TREE;
9986 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9987 along with N new arguments specified as the "..." parameters. SKIP
9988 is the number of arguments in EXP to be omitted. This function is used
9989 to do varargs-to-varargs transformations. */
9991 static tree
9992 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9994 va_list ap;
9995 tree t;
9997 va_start (ap, n);
9998 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9999 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10000 va_end (ap);
10002 return t;
10005 /* Validate a single argument ARG against a tree code CODE representing
10006 a type. Return true when argument is valid. */
10008 static bool
10009 validate_arg (const_tree arg, enum tree_code code)
10011 if (!arg)
10012 return false;
10013 else if (code == POINTER_TYPE)
10014 return POINTER_TYPE_P (TREE_TYPE (arg));
10015 else if (code == INTEGER_TYPE)
10016 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10017 return code == TREE_CODE (TREE_TYPE (arg));
10020 /* This function validates the types of a function call argument list
10021 against a specified list of tree_codes. If the last specifier is a 0,
10022 that represents an ellipsis; otherwise the last specifier must be a
10023 VOID_TYPE.
10025 This is the GIMPLE version of validate_arglist. Eventually we want to
10026 completely convert builtins.c to work from GIMPLEs and the tree based
10027 validate_arglist will then be removed. */
10029 bool
10030 validate_gimple_arglist (const gcall *call, ...)
10032 enum tree_code code;
10033 bool res = 0;
10034 va_list ap;
10035 const_tree arg;
10036 size_t i;
10038 va_start (ap, call);
10039 i = 0;
10043 code = (enum tree_code) va_arg (ap, int);
10044 switch (code)
10046 case 0:
10047 /* This signifies an ellipsis; any further arguments are all ok. */
10048 res = true;
10049 goto end;
10050 case VOID_TYPE:
10051 /* This signifies an endlink, if no arguments remain, return
10052 true, otherwise return false. */
10053 res = (i == gimple_call_num_args (call));
10054 goto end;
10055 default:
10056 /* If no parameters remain or the parameter's code does not
10057 match the specified code, return false. Otherwise continue
10058 checking any remaining arguments. */
10059 arg = gimple_call_arg (call, i++);
10060 if (!validate_arg (arg, code))
10061 goto end;
10062 break;
10065 while (1);
10067 /* We need gotos here since we can only have one VA_CLOSE in a
10068 function. */
10069 end: ;
10070 va_end (ap);
10072 return res;
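/* Usage sketch (illustrative only): a remquo-style signature would be
   checked with

     validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE, POINTER_TYPE,
			      VOID_TYPE);

   while ending the list with 0 instead of VOID_TYPE accepts any number
   of further arguments, as for a printf-like builtin.  */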
10075 /* Default target-specific builtin expander that does nothing. */
10077 rtx
10078 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10079 rtx target ATTRIBUTE_UNUSED,
10080 rtx subtarget ATTRIBUTE_UNUSED,
10081 machine_mode mode ATTRIBUTE_UNUSED,
10082 int ignore ATTRIBUTE_UNUSED)
10084 return NULL_RTX;
10087 /* Returns true if EXP represents data that would potentially reside
10088 in a readonly section. */
10090 bool
10091 readonly_data_expr (tree exp)
10093 STRIP_NOPS (exp);
10095 if (TREE_CODE (exp) != ADDR_EXPR)
10096 return false;
10098 exp = get_base_address (TREE_OPERAND (exp, 0));
10099 if (!exp)
10100 return false;
10102 /* Make sure we call decl_readonly_section only for trees it
10103 can handle (since it returns true for everything it doesn't
10104 understand). */
10105 if (TREE_CODE (exp) == STRING_CST
10106 || TREE_CODE (exp) == CONSTRUCTOR
10107 || (VAR_P (exp) && TREE_STATIC (exp)))
10108 return decl_readonly_section (exp, 0);
10109 else
10110 return false;
10113 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10114 to the call, and TYPE is its return type.
10116 Return NULL_TREE if no simplification was possible, otherwise return the
10117 simplified form of the call as a tree.
10119 The simplified form may be a constant or other expression which
10120 computes the same value, but in a more efficient manner (including
10121 calls to other builtin functions).
10123 The call may contain arguments which need to be evaluated, but
10124 which are not useful to determine the result of the call. In
10125 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10126 COMPOUND_EXPR will be an argument which must be evaluated.
10127 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10128 COMPOUND_EXPR in the chain will contain the tree for the simplified
10129 form of the builtin function call. */
10131 static tree
10132 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10134 if (!validate_arg (s1, POINTER_TYPE)
10135 || !validate_arg (s2, POINTER_TYPE))
10136 return NULL_TREE;
10137 else
10139 tree fn;
10140 const char *p1, *p2;
10142 p2 = c_getstr (s2);
10143 if (p2 == NULL)
10144 return NULL_TREE;
10146 p1 = c_getstr (s1);
10147 if (p1 != NULL)
10149 const char *r = strpbrk (p1, p2);
10150 tree tem;
10152 if (r == NULL)
10153 return build_int_cst (TREE_TYPE (s1), 0);
10155 /* Return an offset into the constant string argument. */
10156 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10157 return fold_convert_loc (loc, type, tem);
10160 if (p2[0] == '\0')
10161 /* strpbrk(x, "") == NULL.
10162 Evaluate and ignore s1 in case it had side-effects. */
10163 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10165 if (p2[1] != '\0')
10166 return NULL_TREE; /* Really call strpbrk. */
10168 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10169 if (!fn)
10170 return NULL_TREE;
10172 /* New argument list transforming strpbrk(s1, s2) to
10173 strchr(s1, s2[0]). */
10174 return build_call_expr_loc (loc, fn, 2, s1,
10175 build_int_cst (integer_type_node, p2[0]));
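/* Worked example of the fold above (illustrative only).  */

static inline char *
strpbrk_example (char *s)
{
  /* A single-character set folds to strchr: this becomes
     __builtin_strchr (s, 'k').  strpbrk (s, "") would fold to
     ((void) s, NULL), and two constant arguments fold to an offset
     into the first string or to NULL.  */
  return __builtin_strpbrk (s, "k");
}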
10179 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10180 to the call.
10182 Return NULL_TREE if no simplification was possible, otherwise return the
10183 simplified form of the call as a tree.
10185 The simplified form may be a constant or other expression which
10186 computes the same value, but in a more efficient manner (including
10187 calls to other builtin functions).
10189 The call may contain arguments which need to be evaluated, but
10190 which are not useful to determine the result of the call. In
10191 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10192 COMPOUND_EXPR will be an argument which must be evaluated.
10193 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10194 COMPOUND_EXPR in the chain will contain the tree for the simplified
10195 form of the builtin function call. */
10197 static tree
10198 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10200 if (!validate_arg (s1, POINTER_TYPE)
10201 || !validate_arg (s2, POINTER_TYPE))
10202 return NULL_TREE;
10203 else
10205 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10207 /* If either argument is "", the result is 0. */
10208 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10209 /* Evaluate and ignore both arguments in case either one has
10210 side-effects. */
10211 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10212 s1, s2);
10213 return NULL_TREE;
10217 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10218 to the call.
10220 Return NULL_TREE if no simplification was possible, otherwise return the
10221 simplified form of the call as a tree.
10223 The simplified form may be a constant or other expression which
10224 computes the same value, but in a more efficient manner (including
10225 calls to other builtin functions).
10227 The call may contain arguments which need to be evaluated, but
10228 which are not useful to determine the result of the call. In
10229 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10230 COMPOUND_EXPR will be an argument which must be evaluated.
10231 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10232 COMPOUND_EXPR in the chain will contain the tree for the simplified
10233 form of the builtin function call. */
10235 static tree
10236 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10238 if (!validate_arg (s1, POINTER_TYPE)
10239 || !validate_arg (s2, POINTER_TYPE))
10240 return NULL_TREE;
10241 else
10243 /* If the first argument is "", the result is 0. */
10244 const char *p1 = c_getstr (s1);
10245 if (p1 && *p1 == '\0')
10247 /* Evaluate and ignore argument s2 in case it has
10248 side-effects. */
10249 return omit_one_operand_loc (loc, size_type_node,
10250 size_zero_node, s2);
10253 /* If the second argument is "", return __builtin_strlen(s1). */
10254 const char *p2 = c_getstr (s2);
10255 if (p2 && *p2 == '\0')
10257 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10259 /* If the replacement _DECL isn't initialized, don't do the
10260 transformation. */
10261 if (!fn)
10262 return NULL_TREE;
10264 return build_call_expr_loc (loc, fn, 1, s1);
10266 return NULL_TREE;
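/* Worked examples of the folds above (illustrative only):
   strspn (s, "") and strspn ("", s) fold to ((void) ..., 0), and
   strcspn (s, "") folds to a strlen call.  */

static inline __SIZE_TYPE__
strcspn_example (const char *s)
{
  /* Folds to __builtin_strlen (s).  */
  return __builtin_strcspn (s, "");
}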
10270 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10271 produced. False otherwise. This is done so that we don't output the error
10272 or warning twice or three times. */
10274 bool
10275 fold_builtin_next_arg (tree exp, bool va_start_p)
10277 tree fntype = TREE_TYPE (current_function_decl);
10278 int nargs = call_expr_nargs (exp);
10279 tree arg;
10280 /* There is a good chance the current input_location points inside the
10281 definition of the va_start macro (perhaps on the token for the
10282 builtin) in a system header, so warnings will not be emitted.
10283 Use the location in real source code. */
10284 location_t current_location =
10285 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10286 NULL);
10288 if (!stdarg_p (fntype))
10290 error ("%<va_start%> used in function with fixed arguments");
10291 return true;
10294 if (va_start_p)
10296 if (va_start_p && (nargs != 2))
10298 error ("wrong number of arguments to function %<va_start%>");
10299 return true;
10301 arg = CALL_EXPR_ARG (exp, 1);
10303 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10304 when we checked the arguments and if needed issued a warning. */
10305 else
10307 if (nargs == 0)
10309 /* Evidently an out of date version of <stdarg.h>; can't validate
10310 va_start's second argument, but can still work as intended. */
10311 warning_at (current_location,
10312 OPT_Wvarargs,
10313 "%<__builtin_next_arg%> called without an argument");
10314 return true;
10316 else if (nargs > 1)
10318 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10319 return true;
10321 arg = CALL_EXPR_ARG (exp, 0);
10324 if (TREE_CODE (arg) == SSA_NAME)
10325 arg = SSA_NAME_VAR (arg);
10327 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10328 or __builtin_next_arg (0) the first time we see it, after checking
10329 the arguments and if needed issuing a warning. */
10330 if (!integer_zerop (arg))
10332 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10334 /* Strip off all nops for the sake of the comparison. This
10335 is not quite the same as STRIP_NOPS. It does more.
10336 We must also strip off INDIRECT_EXPR for C++ reference
10337 parameters. */
10338 while (CONVERT_EXPR_P (arg)
10339 || TREE_CODE (arg) == INDIRECT_REF)
10340 arg = TREE_OPERAND (arg, 0);
10341 if (arg != last_parm)
10343 /* FIXME: Sometimes with the tree optimizers we can get something
10344 other than the last argument even though the user used the last
10345 argument. We just warn and set the arg to be the last
10346 argument so that we will get wrong-code because of
10347 it. */
10348 warning_at (current_location,
10349 OPT_Wvarargs,
10350 "second parameter of %<va_start%> not last named argument");
10353 /* Undefined by C99 7.15.1.4p4 (va_start):
10354 "If the parameter parmN is declared with the register storage
10355 class, with a function or array type, or with a type that is
10356 not compatible with the type that results after application of
10357 the default argument promotions, the behavior is undefined."  */
10359 else if (DECL_REGISTER (arg))
10361 warning_at (current_location,
10362 OPT_Wvarargs,
10363 "undefined behavior when second parameter of "
10364 "%<va_start%> is declared with %<register%> storage");
10367 /* We want to verify the second parameter just once before the tree
10368 optimizers are run and then avoid keeping it in the tree,
10369 as otherwise we could warn even for correct code like:
10370 void foo (int i, ...)
10371 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10372 if (va_start_p)
10373 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10374 else
10375 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10377 return false;
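/* Example of the checks above (illustrative only): given

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }

   the second argument of va_start is not the last named parameter, so
   the -Wvarargs warning fires; using va_start in a function with a
   fixed argument list is a hard error.  */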
10381 /* Expand a call EXP to __builtin_object_size. */
10383 static rtx
10384 expand_builtin_object_size (tree exp)
10386 tree ost;
10387 int object_size_type;
10388 tree fndecl = get_callee_fndecl (exp);
10390 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10392 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10393 exp, fndecl);
10394 expand_builtin_trap ();
10395 return const0_rtx;
10398 ost = CALL_EXPR_ARG (exp, 1);
10399 STRIP_NOPS (ost);
10401 if (TREE_CODE (ost) != INTEGER_CST
10402 || tree_int_cst_sgn (ost) < 0
10403 || compare_tree_int (ost, 3) > 0)
10405 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10406 exp, fndecl);
10407 expand_builtin_trap ();
10408 return const0_rtx;
10411 object_size_type = tree_to_shwi (ost);
10413 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10416 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10417 FCODE is the BUILT_IN_* to use.
10418 Return NULL_RTX if we failed; the caller should emit a normal call,
10419 otherwise try to get the result in TARGET, if convenient (and in
10420 mode MODE if that's convenient). */
10422 static rtx
10423 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10424 enum built_in_function fcode)
10426 if (!validate_arglist (exp,
10427 POINTER_TYPE,
10428 fcode == BUILT_IN_MEMSET_CHK
10429 ? INTEGER_TYPE : POINTER_TYPE,
10430 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10431 return NULL_RTX;
10433 tree dest = CALL_EXPR_ARG (exp, 0);
10434 tree src = CALL_EXPR_ARG (exp, 1);
10435 tree len = CALL_EXPR_ARG (exp, 2);
10436 tree size = CALL_EXPR_ARG (exp, 3);
10438 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10439 /*str=*/NULL_TREE, size);
10441 if (!tree_fits_uhwi_p (size))
10442 return NULL_RTX;
10444 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10446 /* Avoid transforming the checking call to an ordinary one when
10447 an overflow has been detected or when the call couldn't be
10448 validated because the size is not constant. */
10449 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10450 return NULL_RTX;
10452 tree fn = NULL_TREE;
10453 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10454 mem{cpy,pcpy,move,set} is available. */
10455 switch (fcode)
10457 case BUILT_IN_MEMCPY_CHK:
10458 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10459 break;
10460 case BUILT_IN_MEMPCPY_CHK:
10461 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10462 break;
10463 case BUILT_IN_MEMMOVE_CHK:
10464 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10465 break;
10466 case BUILT_IN_MEMSET_CHK:
10467 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10468 break;
10469 default:
10470 break;
10473 if (! fn)
10474 return NULL_RTX;
10476 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10477 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10478 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10479 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10481 else if (fcode == BUILT_IN_MEMSET_CHK)
10482 return NULL_RTX;
10483 else
10485 unsigned int dest_align = get_pointer_alignment (dest);
10487 /* If DEST is not a pointer type, call the normal function. */
10488 if (dest_align == 0)
10489 return NULL_RTX;
10491 /* If SRC and DEST are the same (and not volatile), do nothing. */
10492 if (operand_equal_p (src, dest, 0))
10494 tree expr;
10496 if (fcode != BUILT_IN_MEMPCPY_CHK)
10498 /* Evaluate and ignore LEN in case it has side-effects. */
10499 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10500 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10503 expr = fold_build_pointer_plus (dest, len);
10504 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10507 /* __memmove_chk special case. */
10508 if (fcode == BUILT_IN_MEMMOVE_CHK)
10510 unsigned int src_align = get_pointer_alignment (src);
10512 if (src_align == 0)
10513 return NULL_RTX;
10515 /* If src is categorized for a readonly section we can use
10516 normal __memcpy_chk. */
10517 if (readonly_data_expr (src))
10519 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10520 if (!fn)
10521 return NULL_RTX;
10522 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10523 dest, src, len, size);
10524 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10525 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10526 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10529 return NULL_RTX;
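/* Usage sketch (illustrative only): a fortified call such as the one
   below reaches this function; because the length 8 is a constant no
   larger than the known object size, it is expanded as a plain
   memcpy.  */

static inline void
memcpy_chk_example (const char *s)
{
  char buf[16];
  /* Length 8 <= size 16: expands to __builtin_memcpy (buf, s, 8).  */
  __builtin___memcpy_chk (buf, s, 8, __builtin_object_size (buf, 0));
  (void) buf;
}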
10533 /* Emit warning if a buffer overflow is detected at compile time. */
10535 static void
10536 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10538 /* The source string. */
10539 tree srcstr = NULL_TREE;
10540 /* The size of the destination object. */
10541 tree objsize = NULL_TREE;
10542 /* The string that is being concatenated with (as in __strcat_chk)
10543 or null if it isn't. */
10544 tree catstr = NULL_TREE;
10545 /* The maximum length of the source sequence in a bounded operation
10546 (such as __strncat_chk) or null if the operation isn't bounded
10547 (such as __strcat_chk). */
10548 tree maxread = NULL_TREE;
10549 /* The exact size of the access (such as in __strncpy_chk). */
10550 tree size = NULL_TREE;
10552 switch (fcode)
10554 case BUILT_IN_STRCPY_CHK:
10555 case BUILT_IN_STPCPY_CHK:
10556 srcstr = CALL_EXPR_ARG (exp, 1);
10557 objsize = CALL_EXPR_ARG (exp, 2);
10558 break;
10560 case BUILT_IN_STRCAT_CHK:
10561 /* For __strcat_chk the warning will be emitted only if overflowing
10562 by at least strlen (dest) + 1 bytes. */
10563 catstr = CALL_EXPR_ARG (exp, 0);
10564 srcstr = CALL_EXPR_ARG (exp, 1);
10565 objsize = CALL_EXPR_ARG (exp, 2);
10566 break;
10568 case BUILT_IN_STRNCAT_CHK:
10569 catstr = CALL_EXPR_ARG (exp, 0);
10570 srcstr = CALL_EXPR_ARG (exp, 1);
10571 maxread = CALL_EXPR_ARG (exp, 2);
10572 objsize = CALL_EXPR_ARG (exp, 3);
10573 break;
10575 case BUILT_IN_STRNCPY_CHK:
10576 case BUILT_IN_STPNCPY_CHK:
10577 srcstr = CALL_EXPR_ARG (exp, 1);
10578 size = CALL_EXPR_ARG (exp, 2);
10579 objsize = CALL_EXPR_ARG (exp, 3);
10580 break;
10582 case BUILT_IN_SNPRINTF_CHK:
10583 case BUILT_IN_VSNPRINTF_CHK:
10584 maxread = CALL_EXPR_ARG (exp, 1);
10585 objsize = CALL_EXPR_ARG (exp, 3);
10586 break;
10587 default:
10588 gcc_unreachable ();
10591 if (catstr && maxread)
10593 /* Check __strncat_chk. There is no way to determine the length
10594 of the string to which the source string is being appended so
10595 just warn when the length of the source string is not known. */
10596 check_strncat_sizes (exp, objsize);
10597 return;
10600 /* The destination argument is the first one for all built-ins above. */
10601 tree dst = CALL_EXPR_ARG (exp, 0);
10603 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10606 /* Emit warning if a buffer overflow is detected at compile time
10607 in __sprintf_chk/__vsprintf_chk calls. */
10609 static void
10610 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10612 tree size, len, fmt;
10613 const char *fmt_str;
10614 int nargs = call_expr_nargs (exp);
10616 /* Verify the required arguments in the original call. */
10618 if (nargs < 4)
10619 return;
10620 size = CALL_EXPR_ARG (exp, 2);
10621 fmt = CALL_EXPR_ARG (exp, 3);
10623 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10624 return;
10626 /* Check whether the format is a literal string constant. */
10627 fmt_str = c_getstr (fmt);
10628 if (fmt_str == NULL)
10629 return;
10631 if (!init_target_chars ())
10632 return;
10634 /* If the format doesn't contain % args or %%, we know its size. */
10635 if (strchr (fmt_str, target_percent) == 0)
10636 len = build_int_cstu (size_type_node, strlen (fmt_str));
10637 /* If the format is "%s" and first ... argument is a string literal,
10638 we know it too. */
10639 else if (fcode == BUILT_IN_SPRINTF_CHK
10640 && strcmp (fmt_str, target_percent_s) == 0)
10642 tree arg;
10644 if (nargs < 5)
10645 return;
10646 arg = CALL_EXPR_ARG (exp, 4);
10647 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10648 return;
10650 len = c_strlen (arg, 1);
10651 if (!len || ! tree_fits_uhwi_p (len))
10652 return;
10654 else
10655 return;
10657 /* Add one for the terminating nul. */
10658 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10660 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10661 /*maxread=*/NULL_TREE, len, size);
10664 /* Emit a warning if free is called with the address of a variable. */
10666 static void
10667 maybe_emit_free_warning (tree exp)
10669 if (call_expr_nargs (exp) != 1)
10670 return;
10672 tree arg = CALL_EXPR_ARG (exp, 0);
10674 STRIP_NOPS (arg);
10675 if (TREE_CODE (arg) != ADDR_EXPR)
10676 return;
10678 arg = get_base_address (TREE_OPERAND (arg, 0));
10679 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10680 return;
10682 if (SSA_VAR_P (arg))
10683 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10684 "%Kattempt to free a non-heap object %qD", exp, arg);
10685 else
10686 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10687 "%Kattempt to free a non-heap object", exp);
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side effects in its arguments;
     if there are any side effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determine it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
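
/* Illustrative folds (hypothetical user code, not part of this file):

     char buf[16];
     __builtin_object_size (&buf[4], 0);   // folds to 12
     __builtin_object_size (p++, 0);       // side effects: (size_t) -1
     __builtin_object_size (p++, 2);       // side effects: (size_t) 0

   Types 0 and 1 request a maximum object size and types 2 and 3 a
   minimum, which is why the side-effects fallback above differs
   between the two groups.  */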
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of its arguments and NARGS the argument
   count.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
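
/* Illustrative fold (hypothetical user code, not part of this file):

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0);

   takes its six operands through the "..." mechanism, so it is routed
   here and folds to FP_NORMAL via fold_builtin_fpclassify.  */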
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
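
/* The cached characters let the folders above compare format strings in
   the *target* character set, which may differ from the host's (e.g. an
   EBCDIC target hosted on an ASCII system).  A sketch of the typical
   idiom, as used in maybe_emit_sprintf_chk_warning above:

     if (!init_target_chars ())
       return;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ;  // the format is "%s" in the target charset
*/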
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling the MPFR function was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
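
/* Sketch of the expected calling convention (mpfr_sin is an arbitrary
   stand-in; the real callers are the do_mpfr_arg* helpers):

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();                  // required precondition
     int inexact = mpfr_sin (m, m, rnd);   // sets flags and INEXACT
     tree result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);
*/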
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling the MPC function was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the value pointed to by ARG_QUO and return the result.  The type is
   taken from the type of ARG0 and is used for setting the precision
   of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
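
/* Worked example (hypothetical constants, not from this file):
   remquo (5.0, 3.0, &q) rounds 5.0/3.0 to the nearest integer n = 2,
   so the remainder folds to 5.0 - 2*3.0 = -1.0 and the quotient bits
   stored through Q are 2; the trees built above express exactly
   "(*q = 2, -1.0)".  */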
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
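
/* Worked example (hypothetical constant, not from this file):
   lgamma_r (-0.5, &sg): tgamma (-0.5) = -2*sqrt(pi) is negative, so
   *sg is set to -1 and the call folds to log (2*sqrt(pi)) ~= 1.2655,
   again packaged as a "(*sg = -1, <constant>)" COMPOUND_EXPR.  */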
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
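
/* Sketch of a typical use (assumes the caller passes mpc_pow, as the
   constant folders do when folding cpow):

     result = do_mpc_arg2 (arg0, arg1, type, /-do_nonfinite=-/ 0, mpc_pow);

   (comment markers in the sketch munged to keep this comment valid).
   For instance, cpow (1.0i, 2.0 + 0.0i) folds to -1.0 + 0.0i once both
   arguments are COMPLEX_CSTs.  */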
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
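
/* Usage sketch (hypothetical caller; the GIMPLE code that actually
   invokes this lives elsewhere):

     bool ignore = gimple_call_lhs (call) == NULL_TREE;
     tree folded = fold_call_stmt (call, ignore);
     if (folded)
       ;  // replace the call statement with FOLDED
*/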
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
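
/* Illustrative trigger (hypothetical user code, not part of this file):

     extern int ffs (int) __asm__ ("my_ffs");

   After such a renaming the builtin must call "my_ffs"; the
   BUILT_IN_FFS special case above also redirects the ffs_optab
   libcall, which RTL expansion may fall back to when the target has
   no ffs instruction.  */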
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
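
/* Usage sketch (hypothetical caller; the real users are string-builtin
   folders such as the memchr/strchr handling in gimple-fold.c):

     char c;
     if (target_char_cst_p (arg2, &c))
       ;  // fold using the host value C

   The CHAR_TYPE_SIZE check keeps cross compilers honest when the
   target char is wider than the host's.  */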
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that it is not
   (the list of handled builtins below may be incomplete).  */
bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
      }
  return false;
}