/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
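
/* A few illustrative inputs for the prefix checks above (added for
   this write-up; not in the original source):

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */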
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
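
/* Worked example (added for illustration; not in the original source):
   if get_object_alignment_1 stores align == 64 and bitpos == 16, the
   address is known to be 16 bits (2 bytes) past a 64-bit boundary, so
   the strongest guaranteed alignment is least_bit_hwi (16) == 16 bits.
   Only when bitpos == 0 does the full reported alignment survive.  */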
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
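
/* Example (added for illustration; not in the original source): for
   the bytes "ab\0cd" in memory, string_length (ptr, 1, 5) returns 2,
   stopping at the embedded NUL, and string_length (ptr, 1, 2) also
   returns 2 because no NUL occurs among the first MAXELTS elements.
   With eltsize == 2 or 4, only an all-zero element ends the scan.  */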
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (exp, 1, &data);
  if (len == NULL_TREE && data.len && data.decl)
    {
      if (size)
	{
	  len = data.len;
	  if (data.off)
	    {
	      /* Constant offsets are already accounted for in data.len, but
		 not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (data.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (data.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (data.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (data.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return data.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data;
  memset (&local_strlen_data, 0, sizeof (c_strlen_data));
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->len = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->len = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
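
/* Examples (added for illustration; not in the original source): given
   char s[10] = "foo", c_strlen on &s[1] folds to ssize_int (2); on
   &s[5] it folds to ssize_int (0), because the tail of the array
   beyond the initializer is known to be all zeros; and c_strlen on
   "foo\0bar" plus a non-constant byte offset returns NULL_TREE, since
   the length would depend on where the scan starts.  */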
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
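
/* Example (added for illustration; not in the original source): on a
   little-endian target, c_readstr ("abcd", SImode) produces the
   constant 0x64636261, i.e. str[0] in the least significant byte,
   while a big-endian target produces 0x61626364.  Once a NUL is seen,
   CH latches to zero so every following byte reads as zero.  */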
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
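
/* Example (added for illustration; not in the original source): with
   8-bit target and host chars, target_char_cast on the INTEGER_CST
   0x141 first truncates to a target char (0x41, 'A'), stores it in *P
   and returns 0; it returns 1 only when CST is not an INTEGER_CST or
   the truncated target value still does not fit in a host char.  */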
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
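
/* Example (added for illustration; not in the original source):
   __builtin_return_address (0) performs the final read directly on
   the current frame's address, while __builtin_return_address (2)
   first follows the dynamic chain twice via DYNAMIC_CHAIN_ADDRESS
   (modulo the RETURN_ADDR_IN_PREVIOUS_FRAME adjustment) before
   reading the saved return address.  */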
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
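
/* Buffer layout sketch (added for illustration; not in the original
   source), with P == GET_MODE_SIZE (Pmode):

     offset 0      frame pointer (targetm.builtin_setjmp_frame_value)
     offset P      address of RECEIVER_LABEL
     offset 2 * P  stack save area in sa_mode (machine-dependent)

   expand_builtin_longjmp below reads the words back from the same
   offsets.  */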
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
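
/* Usage sketch (added for illustration; not in the original source):
   an expander for a memcpy-like call would verify its arguments with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
		       INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE requires that no arguments remain; a
   trailing 0 would instead accept any number of further arguments.  */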
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
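
/* Example (added for illustration; not in the original source):
   __builtin_prefetch (p, 1, 3) reaches this point with op1 == 1
   (prefetch for write) and op2 == 3 (maximum temporal locality), and
   __builtin_prefetch (p) defaults to op1 == 0 and op2 == 3.  Without
   a target prefetch pattern, nothing is emitted unless evaluating P
   had side effects.  */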
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
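
/* Arithmetic sketch (added for illustration; not in the original
   source) of the rounding above: with size == 4 and a register mode
   of 8-byte size and alignment, size % align == 4, so size is rounded
   to CEIL (4, 8) * 8 == 8 before the register's 8 bytes are added,
   giving 16.  */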
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1662 /* __builtin_apply_args returns block of memory allocated on
1663 the stack into which is stored the arg pointer, structure
1664 value address, static chain, and all the registers that might
1665 possibly be used in performing a function call. The code is
1666 moved to the start of the function so the incoming values are
1667 saved. */
1669 static rtx
1670 expand_builtin_apply_args (void)
1672 /* Don't do __builtin_apply_args more than once in a function.
1673 Save the result of the first call and reuse it. */
1674 if (apply_args_value != 0)
1675 return apply_args_value;
1677 /* When this function is called, it means that registers must be
1678 saved on entry to this function. So we migrate the
1679 call to the first insn of this function. */
1680 rtx temp;
1682 start_sequence ();
1683 temp = expand_builtin_apply_args_1 ();
1684 rtx_insn *seq = get_insns ();
1685 end_sequence ();
1687 apply_args_value = temp;
1689 /* Put the insns after the NOTE that starts the function.
1690 If this is inside a start_sequence, make the outer-level insn
1691 chain current, so the code is placed at the start of the
1692 function. If internal_arg_pointer is a non-virtual pseudo,
1693 it needs to be placed after the function that initializes
1694 that pseudo. */
1695 push_topmost_sequence ();
1696 if (REG_P (crtl->args.internal_arg_pointer)
1697 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1698 emit_insn_before (seq, parm_birth_insn);
1699 else
1700 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1701 pop_topmost_sequence ();
1702 return temp;
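/* For illustration, the three untyped-call builtins are normally used
   together at the source level, along these lines:

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size of the argument block.  */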
1706 /* Perform an untyped call and save the state required to perform an
1707 untyped return of whatever value was returned by the given function. */
1709 static rtx
1710 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1712 int size, align, regno;
1713 fixed_size_mode mode;
1714 rtx incoming_args, result, reg, dest, src;
1715 rtx_call_insn *call_insn;
1716 rtx old_stack_level = 0;
1717 rtx call_fusage = 0;
1718 rtx struct_value
1719 = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1720 arguments = convert_memory_address (Pmode, arguments);
1722 /* Create a block where the return registers can be saved. */
1723 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1725 /* Fetch the arg pointer from the ARGUMENTS block. */
1726 incoming_args = gen_reg_rtx (Pmode);
1727 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1728 if (!STACK_GROWS_DOWNWARD)
1729 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1730 incoming_args, 0, OPTAB_LIB_WIDEN);
1732 /* Push a new argument block and copy the arguments. Do not allow
1733 the (potential) memcpy call below to interfere with our stack
1734 manipulations. */
1735 do_pending_stack_adjust ();
1736 NO_DEFER_POP;
1738 /* Save the stack with nonlocal if available. */
1739 if (targetm.have_save_stack_nonlocal ())
1740 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1741 else
1742 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1744 /* Allocate a block of memory onto the stack and copy the memory
1745 arguments to the outgoing arguments address. We can pass TRUE
1746 as the 4th argument because we just saved the stack pointer
1747 and will restore it right after the call. */
1748 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1750 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1751 may have already set current_function_calls_alloca to true.
1752 current_function_calls_alloca won't be set if argsize is zero,
1753 so we have to guarantee need_drap is true here. */
1754 if (SUPPORTS_STACK_ALIGNMENT)
1755 crtl->need_drap = true;
1757 dest = virtual_outgoing_args_rtx;
1758 if (!STACK_GROWS_DOWNWARD)
1760 if (CONST_INT_P (argsize))
1761 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1762 else
1763 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1765 dest = gen_rtx_MEM (BLKmode, dest);
1766 set_mem_align (dest, PARM_BOUNDARY);
1767 src = gen_rtx_MEM (BLKmode, incoming_args);
1768 set_mem_align (src, PARM_BOUNDARY);
1769 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1771 /* Refer to the argument block. */
1772 apply_args_size ();
1773 arguments = gen_rtx_MEM (BLKmode, arguments);
1774 set_mem_align (arguments, PARM_BOUNDARY);
1776 /* Walk past the arg-pointer and structure value address. */
1777 size = GET_MODE_SIZE (Pmode);
1778 if (struct_value)
1779 size += GET_MODE_SIZE (Pmode);
1781 /* Restore each of the registers previously saved. Make USE insns
1782 for each of these registers for use in making the call. */
1783 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1784 if ((mode = apply_args_mode[regno]) != VOIDmode)
1786 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1787 if (size % align != 0)
1788 size = CEIL (size, align) * align;
1789 reg = gen_rtx_REG (mode, regno);
1790 emit_move_insn (reg, adjust_address (arguments, mode, size));
1791 use_reg (&call_fusage, reg);
1792 size += GET_MODE_SIZE (mode);
1795 /* Restore the structure value address unless this is passed as an
1796 "invisible" first argument. */
1797 size = GET_MODE_SIZE (Pmode);
1798 if (struct_value)
1800 rtx value = gen_reg_rtx (Pmode);
1801 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1802 emit_move_insn (struct_value, value);
1803 if (REG_P (struct_value))
1804 use_reg (&call_fusage, struct_value);
1805 size += GET_MODE_SIZE (Pmode);
1808 /* All arguments and registers used for the call are set up by now! */
1809 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1811 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1812 and we don't want to load it into a register as an optimization,
1813 because prepare_call_address already did it if it should be done. */
1814 if (GET_CODE (function) != SYMBOL_REF)
1815 function = memory_address (FUNCTION_MODE, function);
1817 /* Generate the actual call instruction and save the return value. */
1818 if (targetm.have_untyped_call ())
1820 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1821 emit_call_insn (targetm.gen_untyped_call (mem, result,
1822 result_vector (1, result)));
1824 else if (targetm.have_call_value ())
1826 rtx valreg = 0;
1828 /* Locate the unique return register. It is not possible to
1829 express a call that sets more than one return register using
1830 call_value; use untyped_call for that. In fact, untyped_call
1831 only needs to save the return registers in the given block. */
1832 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1833 if ((mode = apply_result_mode[regno]) != VOIDmode)
1835 gcc_assert (!valreg); /* have_untyped_call required. */
1837 valreg = gen_rtx_REG (mode, regno);
1840 emit_insn (targetm.gen_call_value (valreg,
1841 gen_rtx_MEM (FUNCTION_MODE, function),
1842 const0_rtx, NULL_RTX, const0_rtx));
1844 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1846 else
1847 gcc_unreachable ();
1849 /* Find the CALL insn we just emitted, and attach the register usage
1850 information. */
1851 call_insn = last_call_insn ();
1852 add_function_usage_to (call_insn, call_fusage);
1854 /* Restore the stack. */
1855 if (targetm.have_save_stack_nonlocal ())
1856 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1857 else
1858 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1859 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1861 OK_DEFER_POP;
1863 /* Return the address of the result block. */
1864 result = copy_addr_to_reg (XEXP (result, 0));
1865 return convert_memory_address (ptr_mode, result);
1868 /* Perform an untyped return. */
1870 static void
1871 expand_builtin_return (rtx result)
1873 int size, align, regno;
1874 fixed_size_mode mode;
1875 rtx reg;
1876 rtx_insn *call_fusage = 0;
1878 result = convert_memory_address (Pmode, result);
1880 apply_result_size ();
1881 result = gen_rtx_MEM (BLKmode, result);
1883 if (targetm.have_untyped_return ())
1885 rtx vector = result_vector (0, result);
1886 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1887 emit_barrier ();
1888 return;
1891 /* Restore the return value and note that each value is used. */
1892 size = 0;
1893 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1894 if ((mode = apply_result_mode[regno]) != VOIDmode)
1896 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1897 if (size % align != 0)
1898 size = CEIL (size, align) * align;
1899 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1900 emit_move_insn (reg, adjust_address (result, mode, size));
1902 push_to_sequence (call_fusage);
1903 emit_use (reg);
1904 call_fusage = get_insns ();
1905 end_sequence ();
1906 size += GET_MODE_SIZE (mode);
1909 /* Put the USE insns before the return. */
1910 emit_insn (call_fusage);
1912 /* Return whatever value was restored by jumping directly to the end
1913 of the function. */
1914 expand_naked_return ();
1917 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1919 static enum type_class
1920 type_to_class (tree type)
1922 switch (TREE_CODE (type))
1924 case VOID_TYPE: return void_type_class;
1925 case INTEGER_TYPE: return integer_type_class;
1926 case ENUMERAL_TYPE: return enumeral_type_class;
1927 case BOOLEAN_TYPE: return boolean_type_class;
1928 case POINTER_TYPE: return pointer_type_class;
1929 case REFERENCE_TYPE: return reference_type_class;
1930 case OFFSET_TYPE: return offset_type_class;
1931 case REAL_TYPE: return real_type_class;
1932 case COMPLEX_TYPE: return complex_type_class;
1933 case FUNCTION_TYPE: return function_type_class;
1934 case METHOD_TYPE: return method_type_class;
1935 case RECORD_TYPE: return record_type_class;
1936 case UNION_TYPE:
1937 case QUAL_UNION_TYPE: return union_type_class;
1938 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1939 ? string_type_class : array_type_class);
1940 case LANG_TYPE: return lang_type_class;
1941 default: return no_type_class;
1945 /* Expand a call EXP to __builtin_classify_type. */
1947 static rtx
1948 expand_builtin_classify_type (tree exp)
1950 if (call_expr_nargs (exp))
1951 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1952 return GEN_INT (no_type_class);
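/* For example, __builtin_classify_type (1) yields integer_type_class and
   __builtin_classify_type (1.0) yields real_type_class, while a call
   with no argument yields no_type_class.  */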
1955 /* This helper macro, meant to be used in mathfn_built_in below, determines
1956 which among a set of builtin math functions is appropriate for a given type
1957 mode. The `F' (float) and `L' (long double) are automatically generated
1958 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1959 types, there are additional types that are considered with 'F32', 'F64',
1960 'F128', etc. suffixes. */
1961 #define CASE_MATHFN(MATHFN) \
1962 CASE_CFN_##MATHFN: \
1963 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1964 fcodel = BUILT_IN_##MATHFN##L ; break;
1965 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1966 types. */
1967 #define CASE_MATHFN_FLOATN(MATHFN) \
1968 CASE_CFN_##MATHFN: \
1969 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1970 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1971 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1972 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1973 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1974 break;
1975 /* Similar to above, but appends _R after any F/L suffix. */
1976 #define CASE_MATHFN_REENT(MATHFN) \
1977 case CFN_BUILT_IN_##MATHFN##_R: \
1978 case CFN_BUILT_IN_##MATHFN##F_R: \
1979 case CFN_BUILT_IN_##MATHFN##L_R: \
1980 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1981 fcodel = BUILT_IN_##MATHFN##L_R ; break;
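/* As a sketch, CASE_MATHFN (SIN) expands to roughly

     case CFN_SIN: case CFN_BUILT_IN_SIN: case CFN_BUILT_IN_SINF:
     case CFN_BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   since the generated CASE_CFN_SIN covers the internal function as well
   as the float/double/long double built-in variants.  */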
1983 /* Return a function equivalent to FN but operating on floating-point
1984 values of type TYPE, or END_BUILTINS if no such function exists.
1985 This is purely an operation on function codes; it does not guarantee
1986 that the target actually has an implementation of the function. */
1988 static built_in_function
1989 mathfn_built_in_2 (tree type, combined_fn fn)
1991 tree mtype;
1992 built_in_function fcode, fcodef, fcodel;
1993 built_in_function fcodef16 = END_BUILTINS;
1994 built_in_function fcodef32 = END_BUILTINS;
1995 built_in_function fcodef64 = END_BUILTINS;
1996 built_in_function fcodef128 = END_BUILTINS;
1997 built_in_function fcodef32x = END_BUILTINS;
1998 built_in_function fcodef64x = END_BUILTINS;
1999 built_in_function fcodef128x = END_BUILTINS;
2001 switch (fn)
2003 CASE_MATHFN (ACOS)
2004 CASE_MATHFN (ACOSH)
2005 CASE_MATHFN (ASIN)
2006 CASE_MATHFN (ASINH)
2007 CASE_MATHFN (ATAN)
2008 CASE_MATHFN (ATAN2)
2009 CASE_MATHFN (ATANH)
2010 CASE_MATHFN (CBRT)
2011 CASE_MATHFN_FLOATN (CEIL)
2012 CASE_MATHFN (CEXPI)
2013 CASE_MATHFN_FLOATN (COPYSIGN)
2014 CASE_MATHFN (COS)
2015 CASE_MATHFN (COSH)
2016 CASE_MATHFN (DREM)
2017 CASE_MATHFN (ERF)
2018 CASE_MATHFN (ERFC)
2019 CASE_MATHFN (EXP)
2020 CASE_MATHFN (EXP10)
2021 CASE_MATHFN (EXP2)
2022 CASE_MATHFN (EXPM1)
2023 CASE_MATHFN (FABS)
2024 CASE_MATHFN (FDIM)
2025 CASE_MATHFN_FLOATN (FLOOR)
2026 CASE_MATHFN_FLOATN (FMA)
2027 CASE_MATHFN_FLOATN (FMAX)
2028 CASE_MATHFN_FLOATN (FMIN)
2029 CASE_MATHFN (FMOD)
2030 CASE_MATHFN (FREXP)
2031 CASE_MATHFN (GAMMA)
2032 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2033 CASE_MATHFN (HUGE_VAL)
2034 CASE_MATHFN (HYPOT)
2035 CASE_MATHFN (ILOGB)
2036 CASE_MATHFN (ICEIL)
2037 CASE_MATHFN (IFLOOR)
2038 CASE_MATHFN (INF)
2039 CASE_MATHFN (IRINT)
2040 CASE_MATHFN (IROUND)
2041 CASE_MATHFN (ISINF)
2042 CASE_MATHFN (J0)
2043 CASE_MATHFN (J1)
2044 CASE_MATHFN (JN)
2045 CASE_MATHFN (LCEIL)
2046 CASE_MATHFN (LDEXP)
2047 CASE_MATHFN (LFLOOR)
2048 CASE_MATHFN (LGAMMA)
2049 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2050 CASE_MATHFN (LLCEIL)
2051 CASE_MATHFN (LLFLOOR)
2052 CASE_MATHFN (LLRINT)
2053 CASE_MATHFN (LLROUND)
2054 CASE_MATHFN (LOG)
2055 CASE_MATHFN (LOG10)
2056 CASE_MATHFN (LOG1P)
2057 CASE_MATHFN (LOG2)
2058 CASE_MATHFN (LOGB)
2059 CASE_MATHFN (LRINT)
2060 CASE_MATHFN (LROUND)
2061 CASE_MATHFN (MODF)
2062 CASE_MATHFN (NAN)
2063 CASE_MATHFN (NANS)
2064 CASE_MATHFN_FLOATN (NEARBYINT)
2065 CASE_MATHFN (NEXTAFTER)
2066 CASE_MATHFN (NEXTTOWARD)
2067 CASE_MATHFN (POW)
2068 CASE_MATHFN (POWI)
2069 CASE_MATHFN (POW10)
2070 CASE_MATHFN (REMAINDER)
2071 CASE_MATHFN (REMQUO)
2072 CASE_MATHFN_FLOATN (RINT)
2073 CASE_MATHFN_FLOATN (ROUND)
2074 CASE_MATHFN (SCALB)
2075 CASE_MATHFN (SCALBLN)
2076 CASE_MATHFN (SCALBN)
2077 CASE_MATHFN (SIGNBIT)
2078 CASE_MATHFN (SIGNIFICAND)
2079 CASE_MATHFN (SIN)
2080 CASE_MATHFN (SINCOS)
2081 CASE_MATHFN (SINH)
2082 CASE_MATHFN_FLOATN (SQRT)
2083 CASE_MATHFN (TAN)
2084 CASE_MATHFN (TANH)
2085 CASE_MATHFN (TGAMMA)
2086 CASE_MATHFN_FLOATN (TRUNC)
2087 CASE_MATHFN (Y0)
2088 CASE_MATHFN (Y1)
2089 CASE_MATHFN (YN)
2091 default:
2092 return END_BUILTINS;
2095 mtype = TYPE_MAIN_VARIANT (type);
2096 if (mtype == double_type_node)
2097 return fcode;
2098 else if (mtype == float_type_node)
2099 return fcodef;
2100 else if (mtype == long_double_type_node)
2101 return fcodel;
2102 else if (mtype == float16_type_node)
2103 return fcodef16;
2104 else if (mtype == float32_type_node)
2105 return fcodef32;
2106 else if (mtype == float64_type_node)
2107 return fcodef64;
2108 else if (mtype == float128_type_node)
2109 return fcodef128;
2110 else if (mtype == float32x_type_node)
2111 return fcodef32x;
2112 else if (mtype == float64x_type_node)
2113 return fcodef64x;
2114 else if (mtype == float128x_type_node)
2115 return fcodef128x;
2116 else
2117 return END_BUILTINS;
2120 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2121 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2122 otherwise use the explicit declaration. If we can't do the conversion,
2123 return null. */
2125 static tree
2126 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2128 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2129 if (fcode2 == END_BUILTINS)
2130 return NULL_TREE;
2132 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2133 return NULL_TREE;
2135 return builtin_decl_explicit (fcode2);
2138 /* Like mathfn_built_in_1, but always use the implicit array. */
2140 tree
2141 mathfn_built_in (tree type, combined_fn fn)
2143 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2146 /* Like mathfn_built_in_1, but take a built_in_function and
2147 always use the implicit array. */
2149 tree
2150 mathfn_built_in (tree type, enum built_in_function fn)
2152 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
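/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SIN) is
   expected to yield the declaration of sinf, or NULL_TREE when no
   implicit declaration of it is available.  */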
2155 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2156 return its code, otherwise return IFN_LAST. Note that this function
2157 only tests whether the function is defined in internals.def, not whether
2158 it is actually available on the target. */
2160 internal_fn
2161 associated_internal_fn (tree fndecl)
2163 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2164 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2168 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2169 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2170 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2171 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2172 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2173 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2174 #include "internal-fn.def"
2176 CASE_FLT_FN (BUILT_IN_POW10):
2177 return IFN_EXP10;
2179 CASE_FLT_FN (BUILT_IN_DREM):
2180 return IFN_REMAINDER;
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2185 return IFN_LDEXP;
2186 return IFN_LAST;
2188 default:
2189 return IFN_LAST;
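/* E.g. the fndecl of sqrt maps to IFN_SQRT, while scalbn maps to
   IFN_LDEXP only when the return type's radix is 2, per the case
   above.  */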
2193 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2194 on the current target by a call to an internal function, return the
2195 code of that internal function, otherwise return IFN_LAST. The caller
2196 is responsible for ensuring that any side-effects of the built-in
2197 call are dealt with correctly. E.g. if CALL sets errno, the caller
2198 must decide that the errno result isn't needed or make it available
2199 in some other way. */
2201 internal_fn
2202 replacement_internal_fn (gcall *call)
2204 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2206 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2207 if (ifn != IFN_LAST)
2209 tree_pair types = direct_internal_fn_types (ifn, call);
2210 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2211 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2212 return ifn;
2215 return IFN_LAST;
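/* As a sketch, a GIMPLE call to __builtin_sqrtf (x) can be replaced by
   IFN_SQRT when the target implements the corresponding optab for
   SFmode; the caller must still account for errno if it is needed.  */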
2218 /* Expand a call to the builtin trinary math functions (fma).
2219 Return NULL_RTX if a normal call should be emitted rather than expanding the
2220 function in-line. EXP is the expression that is a call to the builtin
2221 function; if convenient, the result should be placed in TARGET.
2222 SUBTARGET may be used as the target for computing one of EXP's
2223 operands. */
2225 static rtx
2226 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2228 optab builtin_optab;
2229 rtx op0, op1, op2, result;
2230 rtx_insn *insns;
2231 tree fndecl = get_callee_fndecl (exp);
2232 tree arg0, arg1, arg2;
2233 machine_mode mode;
2235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2236 return NULL_RTX;
2238 arg0 = CALL_EXPR_ARG (exp, 0);
2239 arg1 = CALL_EXPR_ARG (exp, 1);
2240 arg2 = CALL_EXPR_ARG (exp, 2);
2242 switch (DECL_FUNCTION_CODE (fndecl))
2244 CASE_FLT_FN (BUILT_IN_FMA):
2245 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2246 builtin_optab = fma_optab; break;
2247 default:
2248 gcc_unreachable ();
2251 /* Make a suitable register to place result in. */
2252 mode = TYPE_MODE (TREE_TYPE (exp));
2254 /* Before working hard, check whether the instruction is available. */
2255 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2256 return NULL_RTX;
2258 result = gen_reg_rtx (mode);
2260 /* Always stabilize the argument list. */
2261 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2262 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2263 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2265 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2266 op1 = expand_normal (arg1);
2267 op2 = expand_normal (arg2);
2269 start_sequence ();
2271 /* Compute into RESULT.
2272 Set RESULT to wherever the result comes back. */
2273 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2274 result, 0);
2276 /* If we were unable to expand via the builtin, stop the sequence
2277 (without outputting the insns) and call to the library function
2278 with the stabilized argument list. */
2279 if (result == 0)
2281 end_sequence ();
2282 return expand_call (exp, target, target == const0_rtx);
2285 /* Output the entire sequence. */
2286 insns = get_insns ();
2287 end_sequence ();
2288 emit_insn (insns);
2290 return result;
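/* E.g. __builtin_fma (a, b, c) expands through fma_optab to a single
   fused multiply-add insn on targets that provide one (an fmadf4
   pattern, say); otherwise the call falls back to the fma libcall
   with the already-stabilized arguments.  */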
2293 /* Expand a call to the builtin sin and cos math functions.
2294 Return NULL_RTX if a normal call should be emitted rather than expanding the
2295 function in-line. EXP is the expression that is a call to the builtin
2296 function; if convenient, the result should be placed in TARGET.
2297 SUBTARGET may be used as the target for computing one of EXP's
2298 operands. */
2300 static rtx
2301 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2303 optab builtin_optab;
2304 rtx op0;
2305 rtx_insn *insns;
2306 tree fndecl = get_callee_fndecl (exp);
2307 machine_mode mode;
2308 tree arg;
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2313 arg = CALL_EXPR_ARG (exp, 0);
2315 switch (DECL_FUNCTION_CODE (fndecl))
2317 CASE_FLT_FN (BUILT_IN_SIN):
2318 CASE_FLT_FN (BUILT_IN_COS):
2319 builtin_optab = sincos_optab; break;
2320 default:
2321 gcc_unreachable ();
2324 /* Make a suitable register to place result in. */
2325 mode = TYPE_MODE (TREE_TYPE (exp));
2327 /* Check if the sincos insn is available; otherwise fall back
2328 to the sin or cos insn. */
2329 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2330 switch (DECL_FUNCTION_CODE (fndecl))
2332 CASE_FLT_FN (BUILT_IN_SIN):
2333 builtin_optab = sin_optab; break;
2334 CASE_FLT_FN (BUILT_IN_COS):
2335 builtin_optab = cos_optab; break;
2336 default:
2337 gcc_unreachable ();
2340 /* Before working hard, check whether the instruction is available. */
2341 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2343 rtx result = gen_reg_rtx (mode);
2345 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2346 need to expand the argument again. This way, we will not perform
2347 side-effects more than once. */
2348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2350 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2352 start_sequence ();
2354 /* Compute into RESULT.
2355 Set RESULT to wherever the result comes back. */
2356 if (builtin_optab == sincos_optab)
2358 int ok;
2360 switch (DECL_FUNCTION_CODE (fndecl))
2362 CASE_FLT_FN (BUILT_IN_SIN):
2363 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2364 break;
2365 CASE_FLT_FN (BUILT_IN_COS):
2366 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2367 break;
2368 default:
2369 gcc_unreachable ();
2371 gcc_assert (ok);
2373 else
2374 result = expand_unop (mode, builtin_optab, op0, result, 0);
2376 if (result != 0)
2378 /* Output the entire sequence. */
2379 insns = get_insns ();
2380 end_sequence ();
2381 emit_insn (insns);
2382 return result;
2385 /* If we were unable to expand via the builtin, stop the sequence
2386 (without outputting the insns) and call to the library function
2387 with the stabilized argument list. */
2388 end_sequence ();
2391 return expand_call (exp, target, target == const0_rtx);
2394 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2395 return an RTL instruction code that implements the functionality.
2396 If that isn't possible or available return CODE_FOR_nothing. */
2398 static enum insn_code
2399 interclass_mathfn_icode (tree arg, tree fndecl)
2401 bool errno_set = false;
2402 optab builtin_optab = unknown_optab;
2403 machine_mode mode;
2405 switch (DECL_FUNCTION_CODE (fndecl))
2407 CASE_FLT_FN (BUILT_IN_ILOGB):
2408 errno_set = true; builtin_optab = ilogb_optab; break;
2409 CASE_FLT_FN (BUILT_IN_ISINF):
2410 builtin_optab = isinf_optab; break;
2411 case BUILT_IN_ISNORMAL:
2412 case BUILT_IN_ISFINITE:
2413 CASE_FLT_FN (BUILT_IN_FINITE):
2414 case BUILT_IN_FINITED32:
2415 case BUILT_IN_FINITED64:
2416 case BUILT_IN_FINITED128:
2417 case BUILT_IN_ISINFD32:
2418 case BUILT_IN_ISINFD64:
2419 case BUILT_IN_ISINFD128:
2420 /* These builtins have no optabs (yet). */
2421 break;
2422 default:
2423 gcc_unreachable ();
2426 /* There's no easy way to detect the case we need to set EDOM. */
2427 if (flag_errno_math && errno_set)
2428 return CODE_FOR_nothing;
2430 /* Optab mode depends on the mode of the input argument. */
2431 mode = TYPE_MODE (TREE_TYPE (arg));
2433 if (builtin_optab)
2434 return optab_handler (builtin_optab, mode);
2435 return CODE_FOR_nothing;
2438 /* Expand a call to one of the builtin math functions that operate on
2439 a floating-point argument and output an integer result (ilogb, isinf,
2440 isnan, etc).
2441 Return 0 if a normal call should be emitted rather than expanding the
2442 function in-line. EXP is the expression that is a call to the builtin
2443 function; if convenient, the result should be placed in TARGET. */
2445 static rtx
2446 expand_builtin_interclass_mathfn (tree exp, rtx target)
2448 enum insn_code icode = CODE_FOR_nothing;
2449 rtx op0;
2450 tree fndecl = get_callee_fndecl (exp);
2451 machine_mode mode;
2452 tree arg;
2454 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2455 return NULL_RTX;
2457 arg = CALL_EXPR_ARG (exp, 0);
2458 icode = interclass_mathfn_icode (arg, fndecl);
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2461 if (icode != CODE_FOR_nothing)
2463 struct expand_operand ops[1];
2464 rtx_insn *last = get_last_insn ();
2465 tree orig_arg = arg;
2467 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2468 need to expand the argument again. This way, we will not perform
2469 side-effects more than once. */
2470 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2472 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2474 if (mode != GET_MODE (op0))
2475 op0 = convert_to_mode (mode, op0, 0);
2477 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2478 if (maybe_legitimize_operands (icode, 0, 1, ops)
2479 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2480 return ops[0].value;
2482 delete_insns_since (last);
2483 CALL_EXPR_ARG (exp, 0) = orig_arg;
2486 return NULL_RTX;
2489 /* Expand a call to the builtin sincos math function.
2490 Return NULL_RTX if a normal call should be emitted rather than expanding the
2491 function in-line. EXP is the expression that is a call to the builtin
2492 function. */
2494 static rtx
2495 expand_builtin_sincos (tree exp)
2497 rtx op0, op1, op2, target1, target2;
2498 machine_mode mode;
2499 tree arg, sinp, cosp;
2500 int result;
2501 location_t loc = EXPR_LOCATION (exp);
2502 tree alias_type, alias_off;
2504 if (!validate_arglist (exp, REAL_TYPE,
2505 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2506 return NULL_RTX;
2508 arg = CALL_EXPR_ARG (exp, 0);
2509 sinp = CALL_EXPR_ARG (exp, 1);
2510 cosp = CALL_EXPR_ARG (exp, 2);
2512 /* Make a suitable register to place result in. */
2513 mode = TYPE_MODE (TREE_TYPE (arg));
2515 /* Check if sincos insn is available, otherwise emit the call. */
2516 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2517 return NULL_RTX;
2519 target1 = gen_reg_rtx (mode);
2520 target2 = gen_reg_rtx (mode);
2522 op0 = expand_normal (arg);
2523 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2524 alias_off = build_int_cst (alias_type, 0);
2525 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2526 sinp, alias_off));
2527 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2528 cosp, alias_off));
2530 /* Compute into target1 and target2.
2531 Set TARGET to wherever the result comes back. */
2532 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2533 gcc_assert (result);
2535 /* Move target1 and target2 to the memory locations indicated
2536 by op1 and op2. */
2537 emit_move_insn (op1, target1);
2538 emit_move_insn (op2, target2);
2540 return const0_rtx;
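/* For example, sincos (x, &s, &c) becomes one insn computing both
   results on targets providing the sincos optab, after which the two
   register results are stored through the user's pointers as above.  */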
2543 /* Expand a call to the internal cexpi builtin to the sincos math function.
2544 EXP is the expression that is a call to the builtin function; if convenient,
2545 the result should be placed in TARGET. */
2547 static rtx
2548 expand_builtin_cexpi (tree exp, rtx target)
2550 tree fndecl = get_callee_fndecl (exp);
2551 tree arg, type;
2552 machine_mode mode;
2553 rtx op0, op1, op2;
2554 location_t loc = EXPR_LOCATION (exp);
2556 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2557 return NULL_RTX;
2559 arg = CALL_EXPR_ARG (exp, 0);
2560 type = TREE_TYPE (arg);
2561 mode = TYPE_MODE (TREE_TYPE (arg));
2563 /* Try expanding via a sincos optab, fall back to emitting a libcall
2564 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2565 is only generated when one of them is available. */
2566 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2568 op1 = gen_reg_rtx (mode);
2569 op2 = gen_reg_rtx (mode);
2571 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2573 /* Compute into op1 and op2. */
2574 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2576 else if (targetm.libc_has_function (function_sincos))
2578 tree call, fn = NULL_TREE;
2579 tree top1, top2;
2580 rtx op1a, op2a;
2582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2585 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2586 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2588 else
2589 gcc_unreachable ();
2591 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2592 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2593 op1a = copy_addr_to_reg (XEXP (op1, 0));
2594 op2a = copy_addr_to_reg (XEXP (op2, 0));
2595 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2596 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2598 /* Make sure not to fold the sincos call again. */
2599 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2600 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2601 call, 3, arg, top1, top2));
2603 else
2605 tree call, fn = NULL_TREE, narg;
2606 tree ctype = build_complex_type (type);
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2614 else
2615 gcc_unreachable ();
2617 /* If we don't have a decl for cexp create one. This is the
2618 friendliest fallback if the user calls __builtin_cexpi
2619 without full target C99 function support. */
2620 if (fn == NULL_TREE)
2622 tree fntype;
2623 const char *name = NULL;
2625 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2626 name = "cexpf";
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2628 name = "cexp";
2629 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2630 name = "cexpl";
2632 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2633 fn = build_fn_decl (name, fntype);
2636 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2637 build_real (type, dconst0), arg);
2639 /* Make sure not to fold the cexp call again. */
2640 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2641 return expand_expr (build_call_nary (ctype, call, 1, narg),
2642 target, VOIDmode, EXPAND_NORMAL);
2645 /* Now build the proper return type. */
2646 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2647 make_tree (TREE_TYPE (arg), op2),
2648 make_tree (TREE_TYPE (arg), op1)),
2649 target, VOIDmode, EXPAND_NORMAL);
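/* In sum, __builtin_cexpi (x) is expanded via the sincos optab when
   available, else lowered to a sincos (x, &sin, &cos) libcall when libc
   provides one, and as a last resort to cexp (0 + x*i), with the parts
   recombined into the complex result.  */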
2652 /* Conveniently construct a function call expression. FNDECL names the
2653 function to be called, N is the number of arguments, and the "..."
2654 parameters are the argument expressions. Unlike build_call_expr
2655 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2657 static tree
2658 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2660 va_list ap;
2661 tree fntype = TREE_TYPE (fndecl);
2662 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2664 va_start (ap, n);
2665 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2666 va_end (ap);
2667 SET_EXPR_LOCATION (fn, loc);
2668 return fn;
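/* Typical use, as in the fallback paths below:

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields an unfolded CALL_EXPR to the fallback function.  */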
2671 /* Expand a call to one of the builtin rounding functions gcc defines
2672 as an extension (lfloor and lceil). As these are gcc extensions we
2673 do not need to worry about setting errno to EDOM.
2674 If expanding via optab fails, lower expression to (int)(floor(x)).
2675 EXP is the expression that is a call to the builtin function;
2676 if convenient, the result should be placed in TARGET. */
2678 static rtx
2679 expand_builtin_int_roundingfn (tree exp, rtx target)
2681 convert_optab builtin_optab;
2682 rtx op0, tmp;
2683 rtx_insn *insns;
2684 tree fndecl = get_callee_fndecl (exp);
2685 enum built_in_function fallback_fn;
2686 tree fallback_fndecl;
2687 machine_mode mode;
2688 tree arg;
2690 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2691 gcc_unreachable ();
2693 arg = CALL_EXPR_ARG (exp, 0);
2695 switch (DECL_FUNCTION_CODE (fndecl))
2697 CASE_FLT_FN (BUILT_IN_ICEIL):
2698 CASE_FLT_FN (BUILT_IN_LCEIL):
2699 CASE_FLT_FN (BUILT_IN_LLCEIL):
2700 builtin_optab = lceil_optab;
2701 fallback_fn = BUILT_IN_CEIL;
2702 break;
2704 CASE_FLT_FN (BUILT_IN_IFLOOR):
2705 CASE_FLT_FN (BUILT_IN_LFLOOR):
2706 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2707 builtin_optab = lfloor_optab;
2708 fallback_fn = BUILT_IN_FLOOR;
2709 break;
2711 default:
2712 gcc_unreachable ();
2715 /* Make a suitable register to place result in. */
2716 mode = TYPE_MODE (TREE_TYPE (exp));
2718 target = gen_reg_rtx (mode);
2720 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2721 need to expand the argument again. This way, we will not perform
2722 side-effects more than once. */
2723 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2725 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2727 start_sequence ();
2729 /* Compute into TARGET. */
2730 if (expand_sfix_optab (target, op0, builtin_optab))
2732 /* Output the entire sequence. */
2733 insns = get_insns ();
2734 end_sequence ();
2735 emit_insn (insns);
2736 return target;
2739 /* If we were unable to expand via the builtin, stop the sequence
2740 (without outputting the insns). */
2741 end_sequence ();
2743 /* Fall back to floating point rounding optab. */
2744 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2746 /* For non-C99 targets we may end up without a fallback fndecl here
2747 if the user called __builtin_lfloor directly. In this case emit
2748 a call to the floor/ceil variants nevertheless. This should result
2749 in the best user experience for targets without full C99 support. */
2750 if (fallback_fndecl == NULL_TREE)
2752 tree fntype;
2753 const char *name = NULL;
2755 switch (DECL_FUNCTION_CODE (fndecl))
2757 case BUILT_IN_ICEIL:
2758 case BUILT_IN_LCEIL:
2759 case BUILT_IN_LLCEIL:
2760 name = "ceil";
2761 break;
2762 case BUILT_IN_ICEILF:
2763 case BUILT_IN_LCEILF:
2764 case BUILT_IN_LLCEILF:
2765 name = "ceilf";
2766 break;
2767 case BUILT_IN_ICEILL:
2768 case BUILT_IN_LCEILL:
2769 case BUILT_IN_LLCEILL:
2770 name = "ceill";
2771 break;
2772 case BUILT_IN_IFLOOR:
2773 case BUILT_IN_LFLOOR:
2774 case BUILT_IN_LLFLOOR:
2775 name = "floor";
2776 break;
2777 case BUILT_IN_IFLOORF:
2778 case BUILT_IN_LFLOORF:
2779 case BUILT_IN_LLFLOORF:
2780 name = "floorf";
2781 break;
2782 case BUILT_IN_IFLOORL:
2783 case BUILT_IN_LFLOORL:
2784 case BUILT_IN_LLFLOORL:
2785 name = "floorl";
2786 break;
2787 default:
2788 gcc_unreachable ();
2791 fntype = build_function_type_list (TREE_TYPE (arg),
2792 TREE_TYPE (arg), NULL_TREE);
2793 fallback_fndecl = build_fn_decl (name, fntype);
2796 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2798 tmp = expand_normal (exp);
2799 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2801 /* Truncate the result of the floating-point optab to integer
2802 via expand_fix (). */
2803 target = gen_reg_rtx (mode);
2804 expand_fix (target, tmp, 0);
2806 return target;
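/* E.g. __builtin_lfloor (x) first tries the lfloor optab and, failing
   that, is lowered to the equivalent of (long) floor (x): a call to the
   floor variant followed by expand_fix, which is safe because these
   extensions never need to set errno.  */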
2809 /* Expand a call to one of the builtin math functions doing integer
2810 conversion (lrint).
2811 Return 0 if a normal call should be emitted rather than expanding the
2812 function in-line. EXP is the expression that is a call to the builtin
2813 function; if convenient, the result should be placed in TARGET. */
2815 static rtx
2816 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2818 convert_optab builtin_optab;
2819 rtx op0;
2820 rtx_insn *insns;
2821 tree fndecl = get_callee_fndecl (exp);
2822 tree arg;
2823 machine_mode mode;
2824 enum built_in_function fallback_fn = BUILT_IN_NONE;
2826 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2827 gcc_unreachable ();
2829 arg = CALL_EXPR_ARG (exp, 0);
2831 switch (DECL_FUNCTION_CODE (fndecl))
2833 CASE_FLT_FN (BUILT_IN_IRINT):
2834 fallback_fn = BUILT_IN_LRINT;
2835 gcc_fallthrough ();
2836 CASE_FLT_FN (BUILT_IN_LRINT):
2837 CASE_FLT_FN (BUILT_IN_LLRINT):
2838 builtin_optab = lrint_optab;
2839 break;
2841 CASE_FLT_FN (BUILT_IN_IROUND):
2842 fallback_fn = BUILT_IN_LROUND;
2843 gcc_fallthrough ();
2844 CASE_FLT_FN (BUILT_IN_LROUND):
2845 CASE_FLT_FN (BUILT_IN_LLROUND):
2846 builtin_optab = lround_optab;
2847 break;
2849 default:
2850 gcc_unreachable ();
2853 /* There's no easy way to detect the case we need to set EDOM. */
2854 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2855 return NULL_RTX;
2857 /* Make a suitable register to place result in. */
2858 mode = TYPE_MODE (TREE_TYPE (exp));
2860 /* When math errno handling is disabled we can expand via the optab directly. */
2861 if (!flag_errno_math)
2863 rtx result = gen_reg_rtx (mode);
2865 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2866 need to expand the argument again. This way, we will not perform
2867 side-effects more than once. */
2868 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2870 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2872 start_sequence ();
2874 if (expand_sfix_optab (result, op0, builtin_optab))
2876 /* Output the entire sequence. */
2877 insns = get_insns ();
2878 end_sequence ();
2879 emit_insn (insns);
2880 return result;
2883 /* If we were unable to expand via the builtin, stop the sequence
2884 (without outputting the insns) and call to the library function
2885 with the stabilized argument list. */
2886 end_sequence ();
2889 if (fallback_fn != BUILT_IN_NONE)
2891 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2892 targets, (int) round (x) should never be transformed into
2893 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2894 a call to lround in the hope that the target provides at least some
2895 C99 functions. This should result in the best user experience for
2896 targets without full C99 support. */
2897 tree fallback_fndecl = mathfn_built_in_1
2898 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2900 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2901 fallback_fndecl, 1, arg);
2903 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2904 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2905 return convert_to_mode (mode, target, 0);
2908 return expand_call (exp, target, target == const0_rtx);
2911 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2912 a normal call should be emitted rather than expanding the function
2913 in-line. EXP is the expression that is a call to the builtin
2914 function; if convenient, the result should be placed in TARGET. */
2916 static rtx
2917 expand_builtin_powi (tree exp, rtx target)
2919 tree arg0, arg1;
2920 rtx op0, op1;
2921 machine_mode mode;
2922 machine_mode mode2;
2924 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2925 return NULL_RTX;
2927 arg0 = CALL_EXPR_ARG (exp, 0);
2928 arg1 = CALL_EXPR_ARG (exp, 1);
2929 mode = TYPE_MODE (TREE_TYPE (exp));
2931 /* Emit a libcall to libgcc. */
2933 /* Mode of the 2nd argument must match that of an int. */
2934 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2936 if (target == NULL_RTX)
2937 target = gen_reg_rtx (mode);
2939 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2940 if (GET_MODE (op0) != mode)
2941 op0 = convert_to_mode (mode, op0, 0);
2942 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2943 if (GET_MODE (op1) != mode2)
2944 op1 = convert_to_mode (mode2, op1, 0);
2946 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2947 target, LCT_CONST, mode,
2948 op0, mode, op1, mode2);
2950 return target;
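/* For instance, __builtin_powi (x, n) with a double X becomes a libcall
   to __powidf2 from libgcc, with N first converted to the mode of an
   int as required above.  */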
2953 /* Expand expression EXP which is a call to the strlen builtin. Return
2954 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2955 try to get the result in TARGET, if convenient. */
2957 static rtx
2958 expand_builtin_strlen (tree exp, rtx target,
2959 machine_mode target_mode)
2961 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2962 return NULL_RTX;
2964 struct expand_operand ops[4];
2965 rtx pat;
2966 tree len;
2967 tree src = CALL_EXPR_ARG (exp, 0);
2968 rtx src_reg;
2969 rtx_insn *before_strlen;
2970 machine_mode insn_mode;
2971 enum insn_code icode = CODE_FOR_nothing;
2972 unsigned int align;
2974 /* If the length can be computed at compile-time, return it. */
2975 len = c_strlen (src, 0);
2976 if (len)
2977 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2979 /* If the length can be computed at compile-time and is constant
2980 integer, but there are side-effects in src, evaluate
2981 src for side-effects, then return len.
2982 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2983 can be optimized into: i++; x = 3; */
2984 len = c_strlen (src, 1);
2985 if (len && TREE_CODE (len) == INTEGER_CST)
2987 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2988 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2991 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2993 /* If SRC is not a pointer type, don't do this operation inline. */
2994 if (align == 0)
2995 return NULL_RTX;
2997 /* Bail out if we can't compute strlen in the right mode. */
2998 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3000 icode = optab_handler (strlen_optab, insn_mode);
3001 if (icode != CODE_FOR_nothing)
3002 break;
3004 if (insn_mode == VOIDmode)
3005 return NULL_RTX;
3007 /* Make a place to hold the source address. We will not expand
3008 the actual source until we are sure that the expansion will
3009 not fail -- there are trees that cannot be expanded twice. */
3010 src_reg = gen_reg_rtx (Pmode);
3012 /* Mark the beginning of the strlen sequence so we can emit the
3013 source operand later. */
3014 before_strlen = get_last_insn ();
3016 create_output_operand (&ops[0], target, insn_mode);
3017 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3018 create_integer_operand (&ops[2], 0);
3019 create_integer_operand (&ops[3], align);
3020 if (!maybe_expand_insn (icode, 4, ops))
3021 return NULL_RTX;
3023 /* Check to see if the argument was declared attribute nonstring
3024 and if so, issue a warning since at this point it's not known
3025 to be nul-terminated. */
3026 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3028 /* Now that we are assured of success, expand the source. */
3029 start_sequence ();
3030 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3031 if (pat != src_reg)
3033 #ifdef POINTERS_EXTEND_UNSIGNED
3034 if (GET_MODE (pat) != Pmode)
3035 pat = convert_to_mode (Pmode, pat,
3036 POINTERS_EXTEND_UNSIGNED);
3037 #endif
3038 emit_move_insn (src_reg, pat);
3040 pat = get_insns ();
3041 end_sequence ();
3043 if (before_strlen)
3044 emit_insn_after (pat, before_strlen);
3045 else
3046 emit_insn_before (pat, get_insns ());
3048 /* Return the value in the proper mode for this function. */
3049 if (GET_MODE (ops[0].value) == target_mode)
3050 target = ops[0].value;
3051 else if (target != 0)
3052 convert_move (target, ops[0].value, 0);
3053 else
3054 target = convert_to_mode (target_mode, ops[0].value, 0);
3056 return target;
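/* E.g. strlen ("hello") is folded to 5 at compile time via c_strlen,
   while strlen (p) with unknown P is expanded inline only on targets
   that provide a strlen insn and otherwise remains a library call.  */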
3059 /* Expand call EXP to the strnlen built-in, returning the result
3060 in TARGET if convenient; return NULL_RTX on failure. */
3062 static rtx
3063 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3065 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3066 return NULL_RTX;
3068 tree src = CALL_EXPR_ARG (exp, 0);
3069 tree bound = CALL_EXPR_ARG (exp, 1);
3071 if (!bound)
3072 return NULL_RTX;
3074 location_t loc = UNKNOWN_LOCATION;
3075 if (EXPR_HAS_LOCATION (exp))
3076 loc = EXPR_LOCATION (exp);
3078 tree maxobjsize = max_object_size ();
3079 tree func = get_callee_fndecl (exp);
3081 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3082 so these conversions aren't necessary. */
3083 c_strlen_data data;
3084 memset (&data, 0, sizeof (c_strlen_data));
3085 tree len = c_strlen (src, 0, &data, 1);
3086 if (len)
3087 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3089 if (TREE_CODE (bound) == INTEGER_CST)
3091 if (!TREE_NO_WARNING (exp)
3092 && tree_int_cst_lt (maxobjsize, bound)
3093 && warning_at (loc, OPT_Wstringop_overflow_,
3094 "%K%qD specified bound %E "
3095 "exceeds maximum object size %E",
3096 exp, func, bound, maxobjsize))
3097 TREE_NO_WARNING (exp) = true;
3099 bool exact = true;
3100 if (!len || TREE_CODE (len) != INTEGER_CST)
3102 /* Clear EXACT if LEN may be less than SRC suggests,
3103 such as in
3104 strnlen (&a[i], sizeof a)
3105 where the value of i is unknown. Unless i's value is
3106 zero, the call is unsafe because the bound is greater. */
3107 data.decl = unterminated_array (src, &len, &exact);
3108 if (!data.decl)
3109 return NULL_RTX;
3112 if (data.decl
3113 && !TREE_NO_WARNING (exp)
3114 && ((tree_int_cst_lt (len, bound))
3115 || !exact))
3117 location_t warnloc
3118 = expansion_point_location_if_in_system_header (loc);
3120 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3121 exact
3122 ? G_("%K%qD specified bound %E exceeds the size %E "
3123 "of unterminated array")
3124 : G_("%K%qD specified bound %E may exceed the size "
3125 "of at most %E of unterminated array"),
3126 exp, func, bound, len))
3128 inform (DECL_SOURCE_LOCATION (data.decl),
3129 "referenced argument declared here");
3130 TREE_NO_WARNING (exp) = true;
3131 return NULL_RTX;
3135 if (!len)
3136 return NULL_RTX;
3138 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3139 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3142 if (TREE_CODE (bound) != SSA_NAME)
3143 return NULL_RTX;
3145 wide_int min, max;
3146 enum value_range_kind rng = get_range_info (bound, &min, &max);
3147 if (rng != VR_RANGE)
3148 return NULL_RTX;
3150 if (!TREE_NO_WARNING (exp)
3151 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3152 && warning_at (loc, OPT_Wstringop_overflow_,
3153 "%K%qD specified bound [%wu, %wu] "
3154 "exceeds maximum object size %E",
3155 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3156 TREE_NO_WARNING (exp) = true;
3158 bool exact = true;
3159 if (!len || TREE_CODE (len) != INTEGER_CST)
3161 data.decl = unterminated_array (src, &len, &exact);
3162 if (!data.decl)
3163 return NULL_RTX;
3166 if (data.decl
3167 && !TREE_NO_WARNING (exp)
3168 && (wi::ltu_p (wi::to_wide (len), min)
3169 || !exact))
3171 location_t warnloc
3172 = expansion_point_location_if_in_system_header (loc);
3174 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3175 exact
3176 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3177 "the size %E of unterminated array")
3178 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3179 "the size of at most %E of unterminated array"),
3180 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3182 inform (DECL_SOURCE_LOCATION (data.decl),
3183 "referenced argument declared here");
3184 TREE_NO_WARNING (exp) = true;
3188 if (data.decl)
3189 return NULL_RTX;
3191 if (wi::gtu_p (min, wi::to_wide (len)))
3192 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3194 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3195 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
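/* As a sketch, given char a[4] = "abcd" (no terminating nul),
   strnlen (a, 16) is diagnosed by the code above because the constant
   bound 16 exceeds the size 4 of the unterminated array, and the call
   is then expanded as a normal call rather than inline.  */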
3198 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3199 bytes from constant string DATA + OFFSET and return it as target
3200 constant. */
3202 static rtx
3203 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3204 scalar_int_mode mode)
3206 const char *str = (const char *) data;
3208 gcc_assert (offset >= 0
3209 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3210 <= strlen (str) + 1));
3212 return c_readstr (str + offset, mode);
3215 /* LEN specifies the length of the block of the memcpy/memset operation.
3216 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3217 In some cases we can make a very likely guess on the max size, which
3218 we then set into PROBABLE_MAX_SIZE. */
3220 static void
3221 determine_block_size (tree len, rtx len_rtx,
3222 unsigned HOST_WIDE_INT *min_size,
3223 unsigned HOST_WIDE_INT *max_size,
3224 unsigned HOST_WIDE_INT *probable_max_size)
3226 if (CONST_INT_P (len_rtx))
3228 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3229 return;
3231 else
3233 wide_int min, max;
3234 enum value_range_kind range_type = VR_UNDEFINED;
3236 /* Determine bounds from the type. */
3237 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3238 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3239 else
3240 *min_size = 0;
3241 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3242 *probable_max_size = *max_size
3243 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3244 else
3245 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3247 if (TREE_CODE (len) == SSA_NAME)
3248 range_type = get_range_info (len, &min, &max);
3249 if (range_type == VR_RANGE)
3251 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3252 *min_size = min.to_uhwi ();
3253 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3254 *probable_max_size = *max_size = max.to_uhwi ();
3256 else if (range_type == VR_ANTI_RANGE)
3258 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3259 if (min == 0)
3261 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3262 *min_size = max.to_uhwi () + 1;
3264 /* Code like
3266 int n;
3267 if (n < 100)
3268 memcpy (a, b, n)
3270 produces an anti-range allowing negative values of N. We can
3271 still use that information and guess that N is not negative. */
3273 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3274 *probable_max_size = min.to_uhwi () - 1;
3277 gcc_checking_assert (*max_size <=
3278 (unsigned HOST_WIDE_INT)
3279 GET_MODE_MASK (GET_MODE (len_rtx)));
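/* Worked example: for

     unsigned n = ...;
     if (n < 100)
       memcpy (a, b, n);

   value range analysis gives N the range [0, 99], so *MIN_SIZE becomes 0
   and both *MAX_SIZE and *PROBABLE_MAX_SIZE become 99, allowing the
   expander to choose a short inline copy sequence.  */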
3282 /* Try to verify that the sizes and lengths of the arguments to a string
3283 manipulation function given by EXP are within valid bounds and that
3284 the operation does not lead to buffer overflow or read past the end.
3285 Arguments other than EXP may be null. When non-null, the arguments
3286 have the following meaning:
3287 DST is the destination of a copy call or NULL otherwise.
3288 SRC is the source of a copy call or NULL otherwise.
3289 DSTWRITE is the number of bytes written into the destination obtained
3290 from the user-supplied size argument to the function (such as in
3291 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3292 MAXREAD is the user-supplied bound on the length of the source sequence
3293 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3294 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3295 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3296 expression EXP is a string function call (as opposed to a memory call
3297 like memcpy). As an exception, SRCSTR can also be an integer denoting
3298 the precomputed size of the source string or object (for functions like
3299 memcpy).
3300 DSTSIZE is the size of the destination object specified by the last
3301 argument to the _chk builtins, typically resulting from the expansion
3302 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3303 DSTSIZE).
3305 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3306 SIZE_MAX.
3308 If the call is successfully verified as safe return true, otherwise
3309 return false. */
3311 static bool
3312 check_access (tree exp, tree, tree, tree dstwrite,
3313 tree maxread, tree srcstr, tree dstsize)
3315 int opt = OPT_Wstringop_overflow_;
3317 /* The size of the largest object is half the address space, or
3318 PTRDIFF_MAX. (This is way too permissive.) */
3319 tree maxobjsize = max_object_size ();
3321 /* Either the length of the source string for string functions or
3322 the size of the source object for raw memory functions. */
3323 tree slen = NULL_TREE;
3325 tree range[2] = { NULL_TREE, NULL_TREE };
3327 /* Set to true when the exact number of bytes written by a string
3328 function like strcpy is not known and the only thing that is
3329 known is that it must be at least one (for the terminating nul). */
3330 bool at_least_one = false;
3331 if (srcstr)
3333 /* SRCSTR is normally a pointer to string but as a special case
3334 it can be an integer denoting the length of a string. */
3335 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3337 /* Try to determine the range of lengths the source string
3338 refers to. If it can be determined and is less than
3339 the upper bound given by MAXREAD add one to it for
3340 the terminating nul. Otherwise, set it to one for
3341 the same reason, or to MAXREAD as appropriate. */
3342 get_range_strlen (srcstr, range);
3343 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3345 if (maxread && tree_int_cst_le (maxread, range[0]))
3346 range[0] = range[1] = maxread;
3347 else
3348 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3349 range[0], size_one_node);
3351 if (maxread && tree_int_cst_le (maxread, range[1]))
3352 range[1] = maxread;
3353 else if (!integer_all_onesp (range[1]))
3354 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3355 range[1], size_one_node);
3357 slen = range[0];
3359 else
3361 at_least_one = true;
3362 slen = size_one_node;
3365 else
3366 slen = srcstr;
3369 if (!dstwrite && !maxread)
3371 /* When the only available piece of data is the object size
3372 there is nothing to do. */
3373 if (!slen)
3374 return true;
3376 /* Otherwise, when the length of the source sequence is known
3377 (as with strlen), set DSTWRITE to it. */
3378 if (!range[0])
3379 dstwrite = slen;
3382 if (!dstsize)
3383 dstsize = maxobjsize;
3385 if (dstwrite)
3386 get_size_range (dstwrite, range);
3388 tree func = get_callee_fndecl (exp);
3390 /* First check the number of bytes to be written against the maximum
3391 object size. */
3392 if (range[0]
3393 && TREE_CODE (range[0]) == INTEGER_CST
3394 && tree_int_cst_lt (maxobjsize, range[0]))
3396 if (TREE_NO_WARNING (exp))
3397 return false;
3399 location_t loc = tree_nonartificial_location (exp);
3400 loc = expansion_point_location_if_in_system_header (loc);
3402 bool warned;
3403 if (range[0] == range[1])
3404 warned = warning_at (loc, opt,
3405 "%K%qD specified size %E "
3406 "exceeds maximum object size %E",
3407 exp, func, range[0], maxobjsize);
3408 else
3409 warned = warning_at (loc, opt,
3410 "%K%qD specified size between %E and %E "
3411 "exceeds maximum object size %E",
3412 exp, func,
3413 range[0], range[1], maxobjsize);
3414 if (warned)
3415 TREE_NO_WARNING (exp) = true;
3417 return false;
3420 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3421 constant, and in range of unsigned HOST_WIDE_INT. */
3422 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3424 /* Next check the number of bytes to be written against the destination
3425 object size. */
3426 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3428 if (range[0]
3429 && TREE_CODE (range[0]) == INTEGER_CST
3430 && ((tree_fits_uhwi_p (dstsize)
3431 && tree_int_cst_lt (dstsize, range[0]))
3432 || (dstwrite
3433 && tree_fits_uhwi_p (dstwrite)
3434 && tree_int_cst_lt (dstwrite, range[0]))))
3436 if (TREE_NO_WARNING (exp))
3437 return false;
3439 location_t loc = tree_nonartificial_location (exp);
3440 loc = expansion_point_location_if_in_system_header (loc);
3442 if (dstwrite == slen && at_least_one)
3444 /* This is a call to strcpy with a destination of 0 size
3445 and a source of unknown length. The call will write
3446 at least one byte past the end of the destination. */
3447 warning_at (loc, opt,
3448 "%K%qD writing %E or more bytes into a region "
3449 "of size %E overflows the destination",
3450 exp, func, range[0], dstsize);
3452 else if (tree_int_cst_equal (range[0], range[1]))
3453 warning_n (loc, opt, tree_to_uhwi (range[0]),
3454 "%K%qD writing %E byte into a region "
3455 "of size %E overflows the destination",
3456 "%K%qD writing %E bytes into a region "
3457 "of size %E overflows the destination",
3458 exp, func, range[0], dstsize);
3459 else if (tree_int_cst_sign_bit (range[1]))
3461 /* Avoid printing the upper bound if it's invalid. */
3462 warning_at (loc, opt,
3463 "%K%qD writing %E or more bytes into a region "
3464 "of size %E overflows the destination",
3465 exp, func, range[0], dstsize);
3467 else
3468 warning_at (loc, opt,
3469 "%K%qD writing between %E and %E bytes into "
3470 "a region of size %E overflows the destination",
3471 exp, func, range[0], range[1],
3472 dstsize);
3474 /* Return false when an overflow has been detected. */
3475 return false;
3479 /* Check the maximum length of the source sequence against the size
3480 of the destination object if known, or against the maximum size
3481 of an object. */
3482 if (maxread)
3484 get_size_range (maxread, range);
3486 /* Use the lower end for MAXREAD from now on. */
3487 if (range[0])
3488 maxread = range[0];
3490 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3492 location_t loc = tree_nonartificial_location (exp);
3493 loc = expansion_point_location_if_in_system_header (loc);
3495 if (tree_int_cst_lt (maxobjsize, range[0]))
3497 if (TREE_NO_WARNING (exp))
3498 return false;
3500 /* Warn about crazy big sizes first since that's more
3501 likely to be meaningful than saying that the bound
3502 is greater than the object size if both are big. */
3503 if (range[0] == range[1])
3504 warning_at (loc, opt,
3505 "%K%qD specified bound %E "
3506 "exceeds maximum object size %E",
3507 exp, func,
3508 range[0], maxobjsize);
3509 else
3510 warning_at (loc, opt,
3511 "%K%qD specified bound between %E and %E "
3512 "exceeds maximum object size %E",
3513 exp, func,
3514 range[0], range[1], maxobjsize);
3516 return false;
3519 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3521 if (TREE_NO_WARNING (exp))
3522 return false;
3524 if (tree_int_cst_equal (range[0], range[1]))
3525 warning_at (loc, opt,
3526 "%K%qD specified bound %E "
3527 "exceeds destination size %E",
3528 exp, func,
3529 range[0], dstsize);
3530 else
3531 warning_at (loc, opt,
3532 "%K%qD specified bound between %E and %E "
3533 "exceeds destination size %E",
3534 exp, func,
3535 range[0], range[1], dstsize);
3536 return false;
3541 /* Check for reading past the end of SRC. */
3542 if (slen
3543 && slen == srcstr
3544 && dstwrite && range[0]
3545 && tree_int_cst_lt (slen, range[0]))
3547 if (TREE_NO_WARNING (exp))
3548 return false;
3550 location_t loc = tree_nonartificial_location (exp);
3552 if (tree_int_cst_equal (range[0], range[1]))
3553 warning_n (loc, opt, tree_to_uhwi (range[0]),
3554 "%K%qD reading %E byte from a region of size %E",
3555 "%K%qD reading %E bytes from a region of size %E",
3556 exp, func, range[0], slen);
3557 else if (tree_int_cst_sign_bit (range[1]))
3559 /* Avoid printing the upper bound if it's invalid. */
3560 warning_at (loc, opt,
3561 "%K%qD reading %E or more bytes from a region "
3562 "of size %E",
3563 exp, func, range[0], slen);
3565 else
3566 warning_at (loc, opt,
3567 "%K%qD reading between %E and %E bytes from a region "
3568 "of size %E",
3569 exp, func, range[0], range[1], slen);
3570 return false;
3573 return true;
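/* An illustrative example of the diagnostics above (hypothetical user
   code, not from this file): with -Wstringop-overflow enabled,

     char d[4];
     memcpy (d, s, 8);

   is flagged as writing 8 bytes into a region of size 4, and the
   function returns false so that callers can suppress the inline
   expansion of the call.  */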
3576 /* Helper to compute the size of the object referenced by the DEST
3577 expression which must have pointer type, using Object Size type
3578 OSTYPE (only the least significant 2 bits are used). Return
3579 an estimate of the size of the object if successful or NULL when
3580 the size cannot be determined. When the referenced object involves
3581 a non-constant offset in some range the returned value represents
3582 the largest size given the smallest non-negative offset in the
3583 range. The function is intended for diagnostics and should not
3584 be used to influence code generation or optimization. */
3586 tree
3587 compute_objsize (tree dest, int ostype)
3589 unsigned HOST_WIDE_INT size;
3591 /* Only the two least significant bits are meaningful. */
3592 ostype &= 3;
3594 if (compute_builtin_object_size (dest, ostype, &size))
3595 return build_int_cst (sizetype, size);
3597 if (TREE_CODE (dest) == SSA_NAME)
3599 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3600 if (!is_gimple_assign (stmt))
3601 return NULL_TREE;
3603 dest = gimple_assign_rhs1 (stmt);
3605 tree_code code = gimple_assign_rhs_code (stmt);
3606 if (code == POINTER_PLUS_EXPR)
3608 /* compute_builtin_object_size fails for addresses with
3609 non-constant offsets. Try to determine the range of
3610 such an offset here and use it to adjust the constant
3611 size. */
3612 tree off = gimple_assign_rhs2 (stmt);
3613 if (TREE_CODE (off) == INTEGER_CST)
3615 if (tree size = compute_objsize (dest, ostype))
3617 wide_int wioff = wi::to_wide (off);
3618 wide_int wisiz = wi::to_wide (size);
3620 /* Ignore negative offsets for now. For others,
3621 use the lower bound as the most optimistic
3622 estimate of the (remaining) size. */
3623 if (wi::sign_mask (wioff))
3625 else if (wi::ltu_p (wioff, wisiz))
3626 return wide_int_to_tree (TREE_TYPE (size),
3627 wi::sub (wisiz, wioff));
3628 else
3629 return size_zero_node;
3632 else if (TREE_CODE (off) == SSA_NAME
3633 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3635 wide_int min, max;
3636 enum value_range_kind rng = get_range_info (off, &min, &max);
3638 if (rng == VR_RANGE)
3640 if (tree size = compute_objsize (dest, ostype))
3642 wide_int wisiz = wi::to_wide (size);
3644 /* Ignore negative offsets for now. For others,
3645 use the lower bound as the most optimistic
3646 estimate of the (remaining) size. */
3647 if (wi::sign_mask (min))
3649 else if (wi::ltu_p (min, wisiz))
3650 return wide_int_to_tree (TREE_TYPE (size),
3651 wi::sub (wisiz, min));
3652 else
3653 return size_zero_node;
3658 else if (code != ADDR_EXPR)
3659 return NULL_TREE;
3662 /* Unless computing the largest size (for memcpy and other raw memory
3663 functions), try to determine the size of the object from its type. */
3664 if (!ostype)
3665 return NULL_TREE;
3667 if (TREE_CODE (dest) != ADDR_EXPR)
3668 return NULL_TREE;
3670 tree type = TREE_TYPE (dest);
3671 if (TREE_CODE (type) == POINTER_TYPE)
3672 type = TREE_TYPE (type);
3674 type = TYPE_MAIN_VARIANT (type);
3676 if (TREE_CODE (type) == ARRAY_TYPE
3677 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3679 /* Return the constant size unless it's zero (that's a zero-length
3680 array likely at the end of a struct). */
3681 tree size = TYPE_SIZE_UNIT (type);
3682 if (size && TREE_CODE (size) == INTEGER_CST
3683 && !integer_zerop (size))
3684 return size;
3687 return NULL_TREE;
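/* A sketch of the POINTER_PLUS adjustment above (hypothetical
   example): given

     char a[8];
     char *p = a + n;     where n is known to be in the range [2, 6]

   compute_objsize (p, 1) takes the lower bound of the offset range
   and yields 8 - 2 = 6, the most optimistic estimate of the space
   remaining past P.  */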
3690 /* Helper to determine and check the sizes of the source and the destination
3691 of a call to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3692 call expression, DEST is the destination argument, SRC is the source
3693 argument or null, and LEN is the number of bytes. Use Object Size type-0
3694 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3695 (no overflow or invalid sizes), false otherwise. */
3697 static bool
3698 check_memop_access (tree exp, tree dest, tree src, tree size)
3700 /* For functions like memset and memcpy that operate on raw memory
3701 try to determine the size of the largest source and destination
3702 object using type-0 Object Size regardless of the object size
3703 type specified by the option. */
3704 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3705 tree dstsize = compute_objsize (dest, 0);
3707 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3708 srcsize, dstsize);
3711 /* Validate memchr arguments without performing any expansion.
3712 Return NULL_RTX. */
3714 static rtx
3715 expand_builtin_memchr (tree exp, rtx)
3717 if (!validate_arglist (exp,
3718 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3719 return NULL_RTX;
3721 tree arg1 = CALL_EXPR_ARG (exp, 0);
3722 tree len = CALL_EXPR_ARG (exp, 2);
3724 /* Diagnose calls where the specified length exceeds the size
3725 of the object. */
3726 if (warn_stringop_overflow)
3728 tree size = compute_objsize (arg1, 0);
3729 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3730 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3733 return NULL_RTX;
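/* For instance (illustrative user code):

     char a[4];
     memchr (a, 'x', 8);

   is diagnosed under -Wstringop-overflow as reading 8 bytes from a
   region of size 4, even though no inline expansion is performed.  */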
3736 /* Expand a call EXP to the memcpy builtin.
3737 Return NULL_RTX if we failed; the caller should emit a normal call,
3738 otherwise try to get the result in TARGET, if convenient (and in
3739 mode MODE if that's convenient). */
3741 static rtx
3742 expand_builtin_memcpy (tree exp, rtx target)
3744 if (!validate_arglist (exp,
3745 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3746 return NULL_RTX;
3748 tree dest = CALL_EXPR_ARG (exp, 0);
3749 tree src = CALL_EXPR_ARG (exp, 1);
3750 tree len = CALL_EXPR_ARG (exp, 2);
3752 check_memop_access (exp, dest, src, len);
3754 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3755 /*retmode=*/ RETURN_BEGIN);
3758 /* Check a call EXP to the memmove built-in for validity.
3759 Return NULL_RTX on both success and failure. */
3761 static rtx
3762 expand_builtin_memmove (tree exp, rtx)
3764 if (!validate_arglist (exp,
3765 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3766 return NULL_RTX;
3768 tree dest = CALL_EXPR_ARG (exp, 0);
3769 tree src = CALL_EXPR_ARG (exp, 1);
3770 tree len = CALL_EXPR_ARG (exp, 2);
3772 check_memop_access (exp, dest, src, len);
3774 return NULL_RTX;
3777 /* Expand a call EXP to the mempcpy builtin.
3778 Return NULL_RTX if we failed; the caller should emit a normal call,
3779 otherwise try to get the result in TARGET, if convenient (and in
3780 mode MODE if that's convenient). */
3782 static rtx
3783 expand_builtin_mempcpy (tree exp, rtx target)
3785 if (!validate_arglist (exp,
3786 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3787 return NULL_RTX;
3789 tree dest = CALL_EXPR_ARG (exp, 0);
3790 tree src = CALL_EXPR_ARG (exp, 1);
3791 tree len = CALL_EXPR_ARG (exp, 2);
3793 /* Policy does not generally allow using compute_objsize (which
3794 is used internally by check_memop_access) to change code generation
3795 or drive optimization decisions.
3797 In this instance it is safe because the code we generate has
3798 the same semantics regardless of the return value of
3799 check_memop_access. Exactly the same amount of data is copied
3800 and the return value is exactly the same in both cases.
3802 Furthermore, check_memop_access always uses mode 0 for the call to
3803 compute_objsize, so the imprecise nature of compute_objsize is
3804 avoided. */
3806 /* Avoid expanding mempcpy into memcpy when the call is determined
3807 to overflow the buffer. This also prevents the same overflow
3808 from being diagnosed again when expanding memcpy. */
3809 if (!check_memop_access (exp, dest, src, len))
3810 return NULL_RTX;
3812 return expand_builtin_mempcpy_args (dest, src, len,
3813 target, exp, /*retmode=*/ RETURN_END);
3816 /* Helper function to do the actual work for expand of memory copy family
3817 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3818 of memory from SRC to DEST and assign to TARGET if convenient. Return
3819 value is based on RETMODE argument. */
3821 static rtx
3822 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3823 rtx target, tree exp, memop_ret retmode)
3825 const char *src_str;
3826 unsigned int src_align = get_pointer_alignment (src);
3827 unsigned int dest_align = get_pointer_alignment (dest);
3828 rtx dest_mem, src_mem, dest_addr, len_rtx;
3829 HOST_WIDE_INT expected_size = -1;
3830 unsigned int expected_align = 0;
3831 unsigned HOST_WIDE_INT min_size;
3832 unsigned HOST_WIDE_INT max_size;
3833 unsigned HOST_WIDE_INT probable_max_size;
3835 /* If DEST is not a pointer type, call the normal function. */
3836 if (dest_align == 0)
3837 return NULL_RTX;
3839 /* If SRC is not a pointer type, don't do this
3840 operation in-line. */
3841 if (src_align == 0)
3842 return NULL_RTX;
3844 if (currently_expanding_gimple_stmt)
3845 stringop_block_profile (currently_expanding_gimple_stmt,
3846 &expected_align, &expected_size);
3848 if (expected_align < dest_align)
3849 expected_align = dest_align;
3850 dest_mem = get_memory_rtx (dest, len);
3851 set_mem_align (dest_mem, dest_align);
3852 len_rtx = expand_normal (len);
3853 determine_block_size (len, len_rtx, &min_size, &max_size,
3854 &probable_max_size);
3855 src_str = c_getstr (src);
3857 /* If SRC is a string constant and block move would be done
3858 by pieces, we can avoid loading the string from memory
3859 and only store the computed constants. */
3860 if (src_str
3861 && CONST_INT_P (len_rtx)
3862 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3863 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3864 CONST_CAST (char *, src_str),
3865 dest_align, false))
3867 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3868 builtin_memcpy_read_str,
3869 CONST_CAST (char *, src_str),
3870 dest_align, false, retmode);
3871 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3872 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3873 return dest_mem;
3876 src_mem = get_memory_rtx (src, len);
3877 set_mem_align (src_mem, src_align);
3879 /* Copy word part most expediently. */
3880 enum block_op_methods method = BLOCK_OP_NORMAL;
3881 if (CALL_EXPR_TAILCALL (exp)
3882 && (retmode == RETURN_BEGIN || target == const0_rtx))
3883 method = BLOCK_OP_TAILCALL;
3884 if (retmode == RETURN_END && target != const0_rtx)
3885 method = BLOCK_OP_NO_LIBCALL_RET;
3886 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3887 expected_align, expected_size,
3888 min_size, max_size, probable_max_size);
3889 if (dest_addr == pc_rtx)
3890 return NULL_RTX;
3892 if (dest_addr == 0)
3894 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3895 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3898 if (retmode != RETURN_BEGIN && target != const0_rtx)
3900 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3901 /* stpcpy returns a pointer to the last byte written, the NUL. */
3902 if (retmode == RETURN_END_MINUS_ONE)
3903 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3906 return dest_addr;
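/* How RETMODE maps to the user-visible return value for a copy of
   N bytes from SRC to DST (illustrative summary, not from the
   original source):

     RETURN_BEGIN          memcpy (dst, src, n)   returns dst
     RETURN_END            mempcpy (dst, src, n)  returns dst + n
     RETURN_END_MINUS_ONE  stpcpy (dst, src)      returns dst + n - 1,
                           the address of the terminating NUL when N
                           counts that NUL.  */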
3909 static rtx
3910 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3911 rtx target, tree orig_exp, memop_ret retmode)
3913 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3914 retmode);
3917 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3918 we failed, the caller should emit a normal call, otherwise try to
3919 get the result in TARGET, if convenient.
3920 Return value is based on RETMODE argument. */
3922 static rtx
3923 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3925 struct expand_operand ops[3];
3926 rtx dest_mem;
3927 rtx src_mem;
3929 if (!targetm.have_movstr ())
3930 return NULL_RTX;
3932 dest_mem = get_memory_rtx (dest, NULL);
3933 src_mem = get_memory_rtx (src, NULL);
3934 if (retmode != RETURN_BEGIN)
3936 target = force_reg (Pmode, XEXP (dest_mem, 0));
3937 dest_mem = replace_equiv_address (dest_mem, target);
3940 create_output_operand (&ops[0], retmode ? target : NULL_RTX, Pmode);
3941 create_fixed_operand (&ops[1], dest_mem);
3942 create_fixed_operand (&ops[2], src_mem);
3943 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3944 return NULL_RTX;
3946 if (retmode != RETURN_BEGIN && target != const0_rtx)
3948 target = ops[0].value;
3949 /* movstr is supposed to set end to the address of the NUL
3950 terminator. If the caller requested a mempcpy-like return value,
3951 adjust it. */
3952 if (retmode == RETURN_END)
3954 rtx tem = plus_constant (GET_MODE (target),
3955 gen_lowpart (GET_MODE (target), target), 1);
3956 emit_move_insn (target, force_operand (tem, NULL_RTX));
3959 return target;
3962 /* Do some very basic size validation of a call to the strcat builtin
3963 given by EXP. Return NULL_RTX to have the built-in expand to a call
3964 to the library function. */
3966 static rtx
3967 expand_builtin_strcat (tree exp, rtx)
3969 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3970 || !warn_stringop_overflow)
3971 return NULL_RTX;
3973 tree dest = CALL_EXPR_ARG (exp, 0);
3974 tree src = CALL_EXPR_ARG (exp, 1);
3976 /* There is no way here to determine the length of the string in
3977 the destination to which the SRC string is being appended, so
3978 just diagnose cases when the source string is longer than
3979 the destination object. */
3981 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3983 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3984 destsize);
3986 return NULL_RTX;
3989 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3990 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3991 try to get the result in TARGET, if convenient (and in mode MODE if that's
3992 convenient). */
3994 static rtx
3995 expand_builtin_strcpy (tree exp, rtx target)
3997 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3998 return NULL_RTX;
4000 tree dest = CALL_EXPR_ARG (exp, 0);
4001 tree src = CALL_EXPR_ARG (exp, 1);
4003 if (warn_stringop_overflow)
4005 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4006 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4007 src, destsize);
4010 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4012 /* Check to see if the argument was declared attribute nonstring
4013 and if so, issue a warning since at this point it's not known
4014 to be nul-terminated. */
4015 tree fndecl = get_callee_fndecl (exp);
4016 maybe_warn_nonstring_arg (fndecl, exp);
4017 return ret;
4020 return NULL_RTX;
4023 /* Helper function to do the actual work for expand_builtin_strcpy. The
4024 arguments to the builtin_strcpy call DEST and SRC are broken out
4025 so that this can also be called without constructing an actual CALL_EXPR.
4026 The other arguments and return value are the same as for
4027 expand_builtin_strcpy. */
4029 static rtx
4030 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4032 /* Detect strcpy calls with unterminated arrays. */
4033 if (tree nonstr = unterminated_array (src))
4035 /* NONSTR refers to the non-nul terminated constant array. */
4036 if (!TREE_NO_WARNING (exp))
4037 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4038 return NULL_RTX;
4041 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4044 /* Expand a call EXP to the stpcpy builtin.
4045 Return NULL_RTX if we failed; the caller should emit a normal call,
4046 otherwise try to get the result in TARGET, if convenient (and in
4047 mode MODE if that's convenient). */
4049 static rtx
4050 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4052 tree dst, src;
4053 location_t loc = EXPR_LOCATION (exp);
4055 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4056 return NULL_RTX;
4058 dst = CALL_EXPR_ARG (exp, 0);
4059 src = CALL_EXPR_ARG (exp, 1);
4061 if (warn_stringop_overflow)
4063 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4064 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4065 src, destsize);
4068 /* If return value is ignored, transform stpcpy into strcpy. */
4069 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4071 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4072 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4073 return expand_expr (result, target, mode, EXPAND_NORMAL);
4075 else
4077 tree len, lenp1;
4078 rtx ret;
4080 /* Ensure we get an actual string whose length can be evaluated at
4081 compile-time, not an expression containing a string. This is
4082 because the latter will potentially produce pessimized code
4083 when used to produce the return value. */
4084 c_strlen_data data;
4085 memset (&data, 0, sizeof (c_strlen_data));
4086 if (!c_getstr (src, NULL)
4087 || !(len = c_strlen (src, 0, &data, 1)))
4088 return expand_movstr (dst, src, target,
4089 /*retmode=*/ RETURN_END_MINUS_ONE);
4091 if (data.decl && !TREE_NO_WARNING (exp))
4092 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, data.decl);
4094 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4095 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4096 target, exp,
4097 /*retmode=*/ RETURN_END_MINUS_ONE);
4099 if (ret)
4100 return ret;
4102 if (TREE_CODE (len) == INTEGER_CST)
4104 rtx len_rtx = expand_normal (len);
4106 if (CONST_INT_P (len_rtx))
4108 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4110 if (ret)
4112 if (! target)
4114 if (mode != VOIDmode)
4115 target = gen_reg_rtx (mode);
4116 else
4117 target = gen_reg_rtx (GET_MODE (ret));
4119 if (GET_MODE (target) != GET_MODE (ret))
4120 ret = gen_lowpart (GET_MODE (target), ret);
4122 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4123 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4124 gcc_assert (ret);
4126 return target;
4131 return expand_movstr (dst, src, target,
4132 /*retmode=*/ RETURN_END_MINUS_ONE);
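/* A worked example of the constant-source path above (illustrative):

     char *p = stpcpy (d, "abc");

   c_strlen yields 3, so LENP1 is 4 and the call is expanded like
   mempcpy (d, "abc", 4) with RETURN_END_MINUS_ONE, leaving P equal
   to D + 3, the address of the copied NUL.  */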
4136 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4137 arguments while being careful to avoid duplicate warnings (which could
4138 be issued if the expander were to expand the call, resulting in it
4139 being emitted in expand_call()). */
4141 static rtx
4142 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4144 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4146 /* The call has been successfully expanded. Check for nonstring
4147 arguments and issue warnings as appropriate. */
4148 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4149 return ret;
4152 return NULL_RTX;
4155 /* Check a call EXP to the stpncpy built-in for validity.
4156 Return NULL_RTX on both success and failure. */
4158 static rtx
4159 expand_builtin_stpncpy (tree exp, rtx)
4161 if (!validate_arglist (exp,
4162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4163 || !warn_stringop_overflow)
4164 return NULL_RTX;
4166 /* The source and destination of the call. */
4167 tree dest = CALL_EXPR_ARG (exp, 0);
4168 tree src = CALL_EXPR_ARG (exp, 1);
4170 /* The exact number of bytes to write (not the maximum). */
4171 tree len = CALL_EXPR_ARG (exp, 2);
4173 /* The size of the destination object. */
4174 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4176 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4178 return NULL_RTX;
4181 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4182 bytes from constant string DATA + OFFSET and return it as target
4183 constant. */
4185 static rtx
4186 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4187 scalar_int_mode mode)
4189 const char *str = (const char *) data;
4191 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4192 return const0_rtx;
4194 return c_readstr (str + offset, mode);
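/* Returning const0_rtx for reads past the end of STR is what gives
   strncpy its required zero padding.  For example (illustrative):

     char d[8];
     strncpy (d, "ab", 8);

   stores 'a' and 'b' followed by six NUL bytes, since every block
   read at or beyond the string's terminating NUL yields zeros.  */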
4197 /* Helper to check the sizes of sequences and the destination of calls
4198 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4199 success (no overflow or invalid sizes), false otherwise. */
4201 static bool
4202 check_strncat_sizes (tree exp, tree objsize)
4204 tree dest = CALL_EXPR_ARG (exp, 0);
4205 tree src = CALL_EXPR_ARG (exp, 1);
4206 tree maxread = CALL_EXPR_ARG (exp, 2);
4208 /* Try to determine the range of lengths that the source expression
4209 refers to. */
4210 tree lenrange[2];
4211 get_range_strlen (src, lenrange);
4213 /* Try to verify that the destination is big enough for the shortest
4214 string. */
4216 if (!objsize && warn_stringop_overflow)
4218 /* If it hasn't been provided by __strncat_chk, try to determine
4219 the size of the destination object into which the source is
4220 being copied. */
4221 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4224 /* Add one for the terminating nul. */
4225 tree srclen = (lenrange[0]
4226 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4227 size_one_node)
4228 : NULL_TREE);
4230 /* The strncat function copies at most MAXREAD bytes and always appends
4231 the terminating nul so the specified upper bound should never be equal
4232 to (or greater than) the size of the destination. */
4233 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4234 && tree_int_cst_equal (objsize, maxread))
4236 location_t loc = tree_nonartificial_location (exp);
4237 loc = expansion_point_location_if_in_system_header (loc);
4239 warning_at (loc, OPT_Wstringop_overflow_,
4240 "%K%qD specified bound %E equals destination size",
4241 exp, get_callee_fndecl (exp), maxread);
4243 return false;
4246 if (!srclen
4247 || (maxread && tree_fits_uhwi_p (maxread)
4248 && tree_fits_uhwi_p (srclen)
4249 && tree_int_cst_lt (maxread, srclen)))
4250 srclen = maxread;
4252 /* The exact number of bytes to write is not known here; check_access
4253 will check SRCLEN, bounded by MAXREAD, against the destination. */
4254 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4255 objsize);
4258 /* Similar to expand_builtin_strcat, do some very basic size validation
4259 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4260 the built-in expand to a call to the library function. */
4262 static rtx
4263 expand_builtin_strncat (tree exp, rtx)
4265 if (!validate_arglist (exp,
4266 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4267 || !warn_stringop_overflow)
4268 return NULL_RTX;
4270 tree dest = CALL_EXPR_ARG (exp, 0);
4271 tree src = CALL_EXPR_ARG (exp, 1);
4272 /* The upper bound on the number of bytes to write. */
4273 tree maxread = CALL_EXPR_ARG (exp, 2);
4274 /* The length of the source sequence. */
4275 tree slen = c_strlen (src, 1);
4277 /* Try to determine the range of lengths that the source expression
4278 refers to. */
4279 tree lenrange[2];
4280 if (slen)
4281 lenrange[0] = lenrange[1] = slen;
4282 else
4283 get_range_strlen (src, lenrange);
4285 /* Try to verify that the destination is big enough for the shortest
4286 string. First try to determine the size of the destination object
4287 into which the source is being copied. */
4288 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4290 /* Add one for the terminating nul. */
4291 tree srclen = (lenrange[0]
4292 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4293 size_one_node)
4294 : NULL_TREE);
4296 /* The strncat function copies at most MAXREAD bytes and always appends
4297 the terminating nul so the specified upper bound should never be equal
4298 to (or greater than) the size of the destination. */
4299 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4300 && tree_int_cst_equal (destsize, maxread))
4302 location_t loc = tree_nonartificial_location (exp);
4303 loc = expansion_point_location_if_in_system_header (loc);
4305 warning_at (loc, OPT_Wstringop_overflow_,
4306 "%K%qD specified bound %E equals destination size",
4307 exp, get_callee_fndecl (exp), maxread);
4309 return NULL_RTX;
4312 if (!srclen
4313 || (maxread && tree_fits_uhwi_p (maxread)
4314 && tree_fits_uhwi_p (srclen)
4315 && tree_int_cst_lt (maxread, srclen)))
4316 srclen = maxread;
4318 /* The number of bytes to write is SRCLEN. */
4319 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4321 return NULL_RTX;
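/* For example (hypothetical user code):

     char d[8];
     strncat (d, s, sizeof d);

   draws the "specified bound equals destination size" warning above,
   because strncat appends up to the bound plus a terminating NUL and
   so can write past the end of D.  */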
4324 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4325 NULL_RTX if we failed; the caller should emit a normal call. */
4327 static rtx
4328 expand_builtin_strncpy (tree exp, rtx target)
4330 location_t loc = EXPR_LOCATION (exp);
4332 if (validate_arglist (exp,
4333 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4335 tree dest = CALL_EXPR_ARG (exp, 0);
4336 tree src = CALL_EXPR_ARG (exp, 1);
4337 /* The number of bytes to write (not the maximum). */
4338 tree len = CALL_EXPR_ARG (exp, 2);
4339 /* The length of the source sequence. */
4340 tree slen = c_strlen (src, 1);
4342 if (warn_stringop_overflow)
4344 tree destsize = compute_objsize (dest,
4345 warn_stringop_overflow - 1);
4347 /* The number of bytes to write is LEN but check_access will also
4348 check SLEN if LEN's value isn't known. */
4349 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4350 destsize);
4353 /* We must be passed a constant len and src parameter. */
4354 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4355 return NULL_RTX;
4357 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4359 /* We're required to pad with trailing zeros if the requested
4360 len is greater than strlen(s2)+1. In that case try to
4361 use store_by_pieces; if it fails, punt. */
4362 if (tree_int_cst_lt (slen, len))
4364 unsigned int dest_align = get_pointer_alignment (dest);
4365 const char *p = c_getstr (src);
4366 rtx dest_mem;
4368 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4369 || !can_store_by_pieces (tree_to_uhwi (len),
4370 builtin_strncpy_read_str,
4371 CONST_CAST (char *, p),
4372 dest_align, false))
4373 return NULL_RTX;
4375 dest_mem = get_memory_rtx (dest, len);
4376 store_by_pieces (dest_mem, tree_to_uhwi (len),
4377 builtin_strncpy_read_str,
4378 CONST_CAST (char *, p), dest_align, false,
4379 RETURN_BEGIN);
4380 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4382 return dest_mem;
4385 return NULL_RTX;
4388 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4389 bytes from constant string DATA + OFFSET and return it as target
4390 constant. */
4392 static rtx
4393 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4394 scalar_int_mode mode)
4396 const char *c = (const char *) data;
4397 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4399 memset (p, *c, GET_MODE_SIZE (mode));
4401 return c_readstr (p, mode);
4404 /* Callback routine for store_by_pieces. Return the RTL of a register
4405 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4406 char value given in the RTL register data. For example, if mode is
4407 4 bytes wide, return the RTL for 0x01010101*data. */
4409 static rtx
4410 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4411 scalar_int_mode mode)
4413 rtx target, coeff;
4414 size_t size;
4415 char *p;
4417 size = GET_MODE_SIZE (mode);
4418 if (size == 1)
4419 return (rtx) data;
4421 p = XALLOCAVEC (char, size);
4422 memset (p, 1, size);
4423 coeff = c_readstr (p, mode);
4425 target = convert_to_mode (mode, (rtx) data, 1);
4426 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4427 return force_reg (mode, target);
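/* The multiplication by a block of 1s is the usual byte-replication
   trick.  A plain C sketch of the same computation for a 4-byte mode
   (illustrative only; the compiler emits the RTL equivalent):

     unsigned int
     replicate_byte (unsigned char c)
     {
       return 0x01010101u * c;   // 0xab becomes 0xabababab
     }
*/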
4430 /* Expand expression EXP, which is a call to the memset builtin. Return
4431 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4432 try to get the result in TARGET, if convenient (and in mode MODE if that's
4433 convenient). */
4435 static rtx
4436 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4438 if (!validate_arglist (exp,
4439 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4440 return NULL_RTX;
4442 tree dest = CALL_EXPR_ARG (exp, 0);
4443 tree val = CALL_EXPR_ARG (exp, 1);
4444 tree len = CALL_EXPR_ARG (exp, 2);
4446 check_memop_access (exp, dest, NULL_TREE, len);
4448 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4451 /* Helper function to do the actual work for expand_builtin_memset. The
4452 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4453 so that this can also be called without constructing an actual CALL_EXPR.
4454 The other arguments and return value are the same as for
4455 expand_builtin_memset. */
4457 static rtx
4458 expand_builtin_memset_args (tree dest, tree val, tree len,
4459 rtx target, machine_mode mode, tree orig_exp)
4461 tree fndecl, fn;
4462 enum built_in_function fcode;
4463 machine_mode val_mode;
4464 char c;
4465 unsigned int dest_align;
4466 rtx dest_mem, dest_addr, len_rtx;
4467 HOST_WIDE_INT expected_size = -1;
4468 unsigned int expected_align = 0;
4469 unsigned HOST_WIDE_INT min_size;
4470 unsigned HOST_WIDE_INT max_size;
4471 unsigned HOST_WIDE_INT probable_max_size;
4473 dest_align = get_pointer_alignment (dest);
4475 /* If DEST is not a pointer type, don't do this operation in-line. */
4476 if (dest_align == 0)
4477 return NULL_RTX;
4479 if (currently_expanding_gimple_stmt)
4480 stringop_block_profile (currently_expanding_gimple_stmt,
4481 &expected_align, &expected_size);
4483 if (expected_align < dest_align)
4484 expected_align = dest_align;
4486 /* If the LEN parameter is zero, return DEST. */
4487 if (integer_zerop (len))
4489 /* Evaluate and ignore VAL in case it has side-effects. */
4490 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4491 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4494 /* Stabilize the arguments in case we fail. */
4495 dest = builtin_save_expr (dest);
4496 val = builtin_save_expr (val);
4497 len = builtin_save_expr (len);
4499 len_rtx = expand_normal (len);
4500 determine_block_size (len, len_rtx, &min_size, &max_size,
4501 &probable_max_size);
4502 dest_mem = get_memory_rtx (dest, len);
4503 val_mode = TYPE_MODE (unsigned_char_type_node);
4505 if (TREE_CODE (val) != INTEGER_CST)
4507 rtx val_rtx;
4509 val_rtx = expand_normal (val);
4510 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4512 /* Assume that we can memset by pieces if we can store
4513 the coefficients by pieces (in the required modes).
4514 We can't pass builtin_memset_gen_str as that emits RTL. */
4515 c = 1;
4516 if (tree_fits_uhwi_p (len)
4517 && can_store_by_pieces (tree_to_uhwi (len),
4518 builtin_memset_read_str, &c, dest_align,
4519 true))
4521 val_rtx = force_reg (val_mode, val_rtx);
4522 store_by_pieces (dest_mem, tree_to_uhwi (len),
4523 builtin_memset_gen_str, val_rtx, dest_align,
4524 true, RETURN_BEGIN);
4526 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4527 dest_align, expected_align,
4528 expected_size, min_size, max_size,
4529 probable_max_size))
4530 goto do_libcall;
4532 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4533 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4534 return dest_mem;
4537 if (target_char_cast (val, &c))
4538 goto do_libcall;
4540 if (c)
4542 if (tree_fits_uhwi_p (len)
4543 && can_store_by_pieces (tree_to_uhwi (len),
4544 builtin_memset_read_str, &c, dest_align,
4545 true))
4546 store_by_pieces (dest_mem, tree_to_uhwi (len),
4547 builtin_memset_read_str, &c, dest_align, true,
4548 RETURN_BEGIN);
4549 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4550 gen_int_mode (c, val_mode),
4551 dest_align, expected_align,
4552 expected_size, min_size, max_size,
4553 probable_max_size))
4554 goto do_libcall;
4556 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4557 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4558 return dest_mem;
4561 set_mem_align (dest_mem, dest_align);
4562 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4563 CALL_EXPR_TAILCALL (orig_exp)
4564 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4565 expected_align, expected_size,
4566 min_size, max_size,
4567 probable_max_size);
4569 if (dest_addr == 0)
4571 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4572 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4575 return dest_addr;
4577 do_libcall:
4578 fndecl = get_callee_fndecl (orig_exp);
4579 fcode = DECL_FUNCTION_CODE (fndecl);
4580 if (fcode == BUILT_IN_MEMSET)
4581 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4582 dest, val, len);
4583 else if (fcode == BUILT_IN_BZERO)
4584 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4585 dest, len);
4586 else
4587 gcc_unreachable ();
4588 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4589 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4590 return expand_call (fn, target, target == const0_rtx);
4593 /* Expand expression EXP, which is a call to the bzero builtin. Return
4594 NULL_RTX if we failed; the caller should emit a normal call. */
4596 static rtx
4597 expand_builtin_bzero (tree exp)
4599 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4600 return NULL_RTX;
4602 tree dest = CALL_EXPR_ARG (exp, 0);
4603 tree size = CALL_EXPR_ARG (exp, 1);
4605 check_memop_access (exp, dest, NULL_TREE, size);
4607 /* New argument list transforming bzero(ptr x, int y) to
4608 memset(ptr x, int 0, size_t y). This is done this way
4609 so that if it isn't expanded inline, we fall back to
4610 calling bzero instead of memset. */
4612 location_t loc = EXPR_LOCATION (exp);
4614 return expand_builtin_memset_args (dest, integer_zero_node,
4615 fold_convert_loc (loc,
4616 size_type_node, size),
4617 const0_rtx, VOIDmode, exp);
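/* In user terms the transformation above is (illustrative):

     bzero (p, n);    becomes    memset (p, 0, (size_t) n);

   with the twist that if the inline expansion fails, the emitted
   library call is still bzero, not memset.  */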
4620 /* Try to expand cmpstr operation ICODE with the given operands.
4621 Return the result rtx on success, otherwise return null. */
4623 static rtx
4624 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4625 HOST_WIDE_INT align)
4627 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4629 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4630 target = NULL_RTX;
4632 struct expand_operand ops[4];
4633 create_output_operand (&ops[0], target, insn_mode);
4634 create_fixed_operand (&ops[1], arg1_rtx);
4635 create_fixed_operand (&ops[2], arg2_rtx);
4636 create_integer_operand (&ops[3], align);
4637 if (maybe_expand_insn (icode, 4, ops))
4638 return ops[0].value;
4639 return NULL_RTX;
4642 /* Expand expression EXP, which is a call to the memcmp built-in function.
4643 Return NULL_RTX if we failed and the caller should emit a normal call,
4644 otherwise try to get the result in TARGET, if convenient.
4645 RESULT_EQ is true if we can relax the returned value to be either zero
4646 or nonzero, without caring about the sign. */
4648 static rtx
4649 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4651 if (!validate_arglist (exp,
4652 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4653 return NULL_RTX;
4655 tree arg1 = CALL_EXPR_ARG (exp, 0);
4656 tree arg2 = CALL_EXPR_ARG (exp, 1);
4657 tree len = CALL_EXPR_ARG (exp, 2);
4658 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4659 bool no_overflow = true;
4661 /* Diagnose calls where the specified length exceeds the size of either
4662 object. */
4663 tree size = compute_objsize (arg1, 0);
4664 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4665 len, /*maxread=*/NULL_TREE, size,
4666 /*objsize=*/NULL_TREE);
4667 if (no_overflow)
4669 size = compute_objsize (arg2, 0);
4670 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4671 len, /*maxread=*/NULL_TREE, size,
4672 /*objsize=*/NULL_TREE);
4675 /* If the specified length exceeds the size of either object,
4676 call the function. */
4677 if (!no_overflow)
4678 return NULL_RTX;
4680 /* Due to the performance benefit, always inline the calls first
4681 when result_eq is false. */
4682 rtx result = NULL_RTX;
4684 if (!result_eq && fcode != BUILT_IN_BCMP)
4686 result = inline_expand_builtin_string_cmp (exp, target);
4687 if (result)
4688 return result;
4691 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4692 location_t loc = EXPR_LOCATION (exp);
4694 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4695 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4697 /* If we don't have POINTER_TYPE, call the function. */
4698 if (arg1_align == 0 || arg2_align == 0)
4699 return NULL_RTX;
4701 rtx arg1_rtx = get_memory_rtx (arg1, len);
4702 rtx arg2_rtx = get_memory_rtx (arg2, len);
4703 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4705 /* Set MEM_SIZE as appropriate. */
4706 if (CONST_INT_P (len_rtx))
4708 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4709 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4712 by_pieces_constfn constfn = NULL;
4714 const char *src_str = c_getstr (arg2);
4715 if (result_eq && src_str == NULL)
4717 src_str = c_getstr (arg1);
4718 if (src_str != NULL)
4719 std::swap (arg1_rtx, arg2_rtx);
4722 /* If SRC is a string constant and block move would be done
4723 by pieces, we can avoid loading the string from memory
4724 and only store the computed constants. */
4725 if (src_str
4726 && CONST_INT_P (len_rtx)
4727 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4728 constfn = builtin_memcpy_read_str;
4730 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4731 TREE_TYPE (len), target,
4732 result_eq, constfn,
4733 CONST_CAST (char *, src_str));
4735 if (result)
4737 /* Return the value in the proper mode for this function. */
4738 if (GET_MODE (result) == mode)
4739 return result;
4741 if (target != 0)
4743 convert_move (target, result, 0);
4744 return target;
4747 return convert_to_mode (mode, result, 0);
4750 return NULL_RTX;
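/* RESULT_EQ corresponds to uses where only equality matters, e.g.
   (illustrative):

     if (memcmp (a, b, sizeof a) == 0) ...

   Such calls can be expanded more cheaply because any nonzero value
   is acceptable on a mismatch; ordered uses of memcmp must instead
   receive a correctly signed result.  */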
4753 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4754 if we failed; the caller should emit a normal call, otherwise try to get
4755 the result in TARGET, if convenient. */
4757 static rtx
4758 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4760 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4761 return NULL_RTX;
4763 /* Due to the performance benefit, always inline the calls first. */
4764 rtx result = NULL_RTX;
4765 result = inline_expand_builtin_string_cmp (exp, target);
4766 if (result)
4767 return result;
4769 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4770 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4771 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4772 return NULL_RTX;
4774 tree arg1 = CALL_EXPR_ARG (exp, 0);
4775 tree arg2 = CALL_EXPR_ARG (exp, 1);
4777 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4778 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4780 /* If we don't have POINTER_TYPE, call the function. */
4781 if (arg1_align == 0 || arg2_align == 0)
4782 return NULL_RTX;
4784 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4785 arg1 = builtin_save_expr (arg1);
4786 arg2 = builtin_save_expr (arg2);
4788 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4789 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4791 /* Try to call cmpstrsi. */
4792 if (cmpstr_icode != CODE_FOR_nothing)
4793 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4794 MIN (arg1_align, arg2_align));
4796 /* Try to determine at least one length and call cmpstrnsi. */
4797 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4799 tree len;
4800 rtx arg3_rtx;
4802 tree len1 = c_strlen (arg1, 1);
4803 tree len2 = c_strlen (arg2, 1);
4805 if (len1)
4806 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4807 if (len2)
4808 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4810 /* If we don't have a constant length for the first, use the length
4811 of the second, if we know it. We don't require a constant for
4812 this case; some cost analysis could be done if both are available
4813 but neither is constant. For now, assume they're equally cheap,
4814 unless one has side effects. If both strings have constant lengths,
4815 use the smaller. */
4817 if (!len1)
4818 len = len2;
4819 else if (!len2)
4820 len = len1;
4821 else if (TREE_SIDE_EFFECTS (len1))
4822 len = len2;
4823 else if (TREE_SIDE_EFFECTS (len2))
4824 len = len1;
4825 else if (TREE_CODE (len1) != INTEGER_CST)
4826 len = len2;
4827 else if (TREE_CODE (len2) != INTEGER_CST)
4828 len = len1;
4829 else if (tree_int_cst_lt (len1, len2))
4830 len = len1;
4831 else
4832 len = len2;
4834 /* If both arguments have side effects, we cannot optimize. */
4835 if (len && !TREE_SIDE_EFFECTS (len))
4837 arg3_rtx = expand_normal (len);
4838 result = expand_cmpstrn_or_cmpmem
4839 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4840 arg3_rtx, MIN (arg1_align, arg2_align));
4844 tree fndecl = get_callee_fndecl (exp);
4845 if (result)
4847 /* Check to see if the argument was declared attribute nonstring
4848 and if so, issue a warning since at this point it's not known
4849 to be nul-terminated. */
4850 maybe_warn_nonstring_arg (fndecl, exp);
4852 /* Return the value in the proper mode for this function. */
4853 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4854 if (GET_MODE (result) == mode)
4855 return result;
4856 if (target == 0)
4857 return convert_to_mode (mode, result, 0);
4858 convert_move (target, result, 0);
4859 return target;
4862 /* Expand the library call ourselves using a stabilized argument
4863 list to avoid evaluating the function's arguments twice. */
4864 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4865 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4866 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4867 return expand_call (fn, target, target == const0_rtx);
4870 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4871 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4872 the result in TARGET, if convenient. */
4874 static rtx
4875 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4876 ATTRIBUTE_UNUSED machine_mode mode)
4878 if (!validate_arglist (exp,
4879 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4880 return NULL_RTX;
4882 /* Due to the performance benefit, always inline the calls first. */
4883 rtx result = NULL_RTX;
4884 result = inline_expand_builtin_string_cmp (exp, target);
4885 if (result)
4886 return result;
4888 /* If c_strlen can determine an expression for one of the string
4889 lengths, and it doesn't have side effects, then emit cmpstrnsi
4890 using length MIN(strlen(string)+1, arg3). */
4891 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4892 if (cmpstrn_icode == CODE_FOR_nothing)
4893 return NULL_RTX;
4895 tree len;
4897 tree arg1 = CALL_EXPR_ARG (exp, 0);
4898 tree arg2 = CALL_EXPR_ARG (exp, 1);
4899 tree arg3 = CALL_EXPR_ARG (exp, 2);
4901 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4902 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4904 tree len1 = c_strlen (arg1, 1);
4905 tree len2 = c_strlen (arg2, 1);
4907 location_t loc = EXPR_LOCATION (exp);
4909 if (len1)
4910 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4911 if (len2)
4912 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4914 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4916 /* If we don't have a constant length for the first, use the length
4917 of the second, if we know it. If neither string is constant length,
4918 use the given length argument. We don't require a constant for
4919 this case; some cost analysis could be done if both are available
4920 but neither is constant. For now, assume they're equally cheap,
4921 unless one has side effects. If both strings have constant lengths,
4922 use the smaller. */
4924 if (!len1 && !len2)
4925 len = len3;
4926 else if (!len1)
4927 len = len2;
4928 else if (!len2)
4929 len = len1;
4930 else if (TREE_SIDE_EFFECTS (len1))
4931 len = len2;
4932 else if (TREE_SIDE_EFFECTS (len2))
4933 len = len1;
4934 else if (TREE_CODE (len1) != INTEGER_CST)
4935 len = len2;
4936 else if (TREE_CODE (len2) != INTEGER_CST)
4937 len = len1;
4938 else if (tree_int_cst_lt (len1, len2))
4939 len = len1;
4940 else
4941 len = len2;
4943 /* If we are not using the given length, we must incorporate it here.
4944 The actual new length parameter will be MIN(len,arg3) in this case. */
4945 if (len != len3)
4947 len = fold_convert_loc (loc, sizetype, len);
4948 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4950 rtx arg1_rtx = get_memory_rtx (arg1, len);
4951 rtx arg2_rtx = get_memory_rtx (arg2, len);
4952 rtx arg3_rtx = expand_normal (len);
4953 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4954 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4955 MIN (arg1_align, arg2_align));
4957 tree fndecl = get_callee_fndecl (exp);
4958 if (result)
4960 /* Check to see if the argument was declared attribute nonstring
4961 and if so, issue a warning since at this point it's not known
4962 to be nul-terminated. */
4963 maybe_warn_nonstring_arg (fndecl, exp);
4965 /* Return the value in the proper mode for this function. */
4966 mode = TYPE_MODE (TREE_TYPE (exp));
4967 if (GET_MODE (result) == mode)
4968 return result;
4969 if (target == 0)
4970 return convert_to_mode (mode, result, 0);
4971 convert_move (target, result, 0);
4972 return target;
4975 /* Expand the library call ourselves using a stabilized argument
4976 list to avoid evaluating the function's arguments twice. */
4977 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4978 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4979 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4980 return expand_call (fn, target, target == const0_rtx);
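/* A worked example of the length selection above (illustrative):

     strncmp (s, "foobar", 100)

   gives len2 = strlen ("foobar") + 1 = 7, so the inline comparison
   uses MIN (7, 100) = 7 bytes; the result cannot change past the
   constant string's NUL, so the larger bound is safely ignored.  */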
4983 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4984 if that's convenient. */
4986 rtx
4987 expand_builtin_saveregs (void)
4989 rtx val;
4990 rtx_insn *seq;
4992 /* Don't do __builtin_saveregs more than once in a function.
4993 Save the result of the first call and reuse it. */
4994 if (saveregs_value != 0)
4995 return saveregs_value;
4997 /* When this function is called, it means that registers must be
4998 saved on entry to this function. So we migrate the call to the
4999 first insn of this function. */
5001 start_sequence ();
5003 /* Do whatever the machine needs done in this case. */
5004 val = targetm.calls.expand_builtin_saveregs ();
5006 seq = get_insns ();
5007 end_sequence ();
5009 saveregs_value = val;
5011 /* Put the insns after the NOTE that starts the function. If this
5012 is inside a start_sequence, make the outer-level insn chain current, so
5013 the code is placed at the start of the function. */
5014 push_topmost_sequence ();
5015 emit_insn_after (seq, entry_of_function ());
5016 pop_topmost_sequence ();
5018 return val;
5021 /* Expand a call to __builtin_next_arg. */
5023 static rtx
5024 expand_builtin_next_arg (void)
5026 /* Checking arguments is already done in fold_builtin_next_arg
5027 that must be called before this function. */
5028 return expand_binop (ptr_mode, add_optab,
5029 crtl->args.internal_arg_pointer,
5030 crtl->args.arg_offset_rtx,
5031 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5034 /* Make it easier for the backends by protecting the valist argument
5035 from multiple evaluations. */
5037 static tree
5038 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5040 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5042 /* The current way of determining the type of valist is completely
5043 bogus. We should have the information on the va builtin instead. */
5044 if (!vatype)
5045 vatype = targetm.fn_abi_va_list (cfun->decl);
5047 if (TREE_CODE (vatype) == ARRAY_TYPE)
5049 if (TREE_SIDE_EFFECTS (valist))
5050 valist = save_expr (valist);
5052 /* For this case, the backends will be expecting a pointer to
5053 vatype, but it's possible we've actually been given an array
5054 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5055 So fix it. */
5056 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5058 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5059 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5062 else
5064 tree pt = build_pointer_type (vatype);
5066 if (! needs_lvalue)
5068 if (! TREE_SIDE_EFFECTS (valist))
5069 return valist;
5071 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5072 TREE_SIDE_EFFECTS (valist) = 1;
5075 if (TREE_SIDE_EFFECTS (valist))
5076 valist = save_expr (valist);
5077 valist = fold_build2_loc (loc, MEM_REF,
5078 vatype, valist, build_int_cst (pt, 0));
5081 return valist;
5084 /* The "standard" definition of va_list is void*. */
5086 tree
5087 std_build_builtin_va_list (void)
5089 return ptr_type_node;
5092 /* The "standard" abi va_list is va_list_type_node. */
5094 tree
5095 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5097 return va_list_type_node;
5100 /* The "standard" type of va_list is va_list_type_node. */
5102 tree
5103 std_canonical_va_list_type (tree type)
5105 tree wtype, htype;
5107 wtype = va_list_type_node;
5108 htype = type;
5110 if (TREE_CODE (wtype) == ARRAY_TYPE)
5112 /* If va_list is an array type, the argument may have decayed
5113 to a pointer type, e.g. by being passed to another function.
5114 In that case, unwrap both types so that we can compare the
5115 underlying records. */
5116 if (TREE_CODE (htype) == ARRAY_TYPE
5117 || POINTER_TYPE_P (htype))
5119 wtype = TREE_TYPE (wtype);
5120 htype = TREE_TYPE (htype);
5123 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5124 return va_list_type_node;
5126 return NULL_TREE;
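/* For example, on targets whose ABI declares something like
   (illustrative, not an actual target definition)

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list argument decays to struct __va_list_tag * when passed to
   another function; the unwrapping above lets both forms canonicalize
   to va_list_type_node.  */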
5129 /* The "standard" implementation of va_start: just assign `nextarg' to
5130 the variable. */
5132 void
5133 std_expand_builtin_va_start (tree valist, rtx nextarg)
5135 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5136 convert_move (va_r, nextarg, 0);
5139 /* Expand EXP, a call to __builtin_va_start. */
5141 static rtx
5142 expand_builtin_va_start (tree exp)
5144 rtx nextarg;
5145 tree valist;
5146 location_t loc = EXPR_LOCATION (exp);
5148 if (call_expr_nargs (exp) < 2)
5150 error_at (loc, "too few arguments to function %<va_start%>");
5151 return const0_rtx;
5154 if (fold_builtin_next_arg (exp, true))
5155 return const0_rtx;
5157 nextarg = expand_builtin_next_arg ();
5158 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5160 if (targetm.expand_builtin_va_start)
5161 targetm.expand_builtin_va_start (valist, nextarg);
5162 else
5163 std_expand_builtin_va_start (valist, nextarg);
5165 return const0_rtx;
5168 /* Expand EXP, a call to __builtin_va_end. */
5170 static rtx
5171 expand_builtin_va_end (tree exp)
5173 tree valist = CALL_EXPR_ARG (exp, 0);
5175 /* Evaluate for side effects, if needed. I hate macros that don't
5176 do that. */
5177 if (TREE_SIDE_EFFECTS (valist))
5178 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5180 return const0_rtx;
5183 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5184 builtin rather than just as an assignment in stdarg.h because of the
5185 nastiness of array-type va_list types. */
5187 static rtx
5188 expand_builtin_va_copy (tree exp)
5190 tree dst, src, t;
5191 location_t loc = EXPR_LOCATION (exp);
5193 dst = CALL_EXPR_ARG (exp, 0);
5194 src = CALL_EXPR_ARG (exp, 1);
5196 dst = stabilize_va_list_loc (loc, dst, 1);
5197 src = stabilize_va_list_loc (loc, src, 0);
5199 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5201 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5203 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5204 TREE_SIDE_EFFECTS (t) = 1;
5205 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5207 else
5209 rtx dstb, srcb, size;
5211 /* Evaluate to pointers. */
5212 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5213 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5214 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5215 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5217 dstb = convert_memory_address (Pmode, dstb);
5218 srcb = convert_memory_address (Pmode, srcb);
5220 /* "Dereference" to BLKmode memories. */
5221 dstb = gen_rtx_MEM (BLKmode, dstb);
5222 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5223 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5224 srcb = gen_rtx_MEM (BLKmode, srcb);
5225 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5226 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5228 /* Copy. */
5229 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5232 return const0_rtx;
5235 /* Expand a call to one of the builtin functions __builtin_frame_address or
5236 __builtin_return_address. */
5238 static rtx
5239 expand_builtin_frame_address (tree fndecl, tree exp)
5241 /* The argument must be a nonnegative integer constant.
5242 It counts the number of frames to scan up the stack.
5243 The value is either the frame pointer value or the return
5244 address saved in that frame. */
5245 if (call_expr_nargs (exp) == 0)
5246 /* Warning about missing arg was already issued. */
5247 return const0_rtx;
5248 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5250 error ("invalid argument to %qD", fndecl);
5251 return const0_rtx;
5253 else
5255 /* Number of frames to scan up the stack. */
5256 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5258 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5260 /* Some ports cannot access arbitrary stack frames. */
5261 if (tem == NULL)
5263 warning (0, "unsupported argument to %qD", fndecl);
5264 return const0_rtx;
5267 if (count)
5269 /* Warn since no effort is made to ensure that any frame
5270 beyond the current one exists or can be safely reached. */
5271 warning (OPT_Wframe_address, "calling %qD with "
5272 "a nonzero argument is unsafe", fndecl);
5275 /* For __builtin_frame_address, return what we've got. */
5276 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5277 return tem;
5279 if (!REG_P (tem)
5280 && ! CONSTANT_P (tem))
5281 tem = copy_addr_to_reg (tem);
5282 return tem;
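/* Usage sketch: both builtins take a constant frame count, and any
   nonzero count triggers the -Wframe-address warning above, since frames
   beyond the current one cannot be verified to exist.

     void *pc = __builtin_return_address (0);   // this frame's return address
     void *fp = __builtin_frame_address (0);    // this function's frame  */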
5286 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5287 failed and the caller should emit a normal call. */
5289 static rtx
5290 expand_builtin_alloca (tree exp)
5292 rtx op0;
5293 rtx result;
5294 unsigned int align;
5295 tree fndecl = get_callee_fndecl (exp);
5296 HOST_WIDE_INT max_size;
5297 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5298 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5299 bool valid_arglist
5300 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5301 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5302 VOID_TYPE)
5303 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5304 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5305 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5307 if (!valid_arglist)
5308 return NULL_RTX;
5310 if ((alloca_for_var
5311 && warn_vla_limit >= HOST_WIDE_INT_MAX
5312 && warn_alloc_size_limit < warn_vla_limit)
5313 || (!alloca_for_var
5314 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5315 && warn_alloc_size_limit < warn_alloca_limit
5318 /* -Walloca-larger-than and -Wvla-larger-than settings of
5319 less than HOST_WIDE_INT_MAX override the more general
5320 -Walloc-size-larger-than so unless either of the former
5321 options is smaller than the latter (which would imply
5322 that the call was already checked), check the alloca
5323 arguments for overflow. */
5324 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5325 int idx[] = { 0, -1 };
5326 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5329 /* Compute the argument. */
5330 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5332 /* Compute the alignment. */
5333 align = (fcode == BUILT_IN_ALLOCA
5334 ? BIGGEST_ALIGNMENT
5335 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5337 /* Compute the maximum size. */
5338 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5339 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5340 : -1);
5342 /* Allocate the desired space. If the allocation stems from the declaration
5343 of a variable-sized object, it cannot accumulate. */
5344 result
5345 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5346 result = convert_memory_address (ptr_mode, result);
5348 return result;
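/* Source forms reaching this expander (illustrative; the alignment
   argument of __builtin_alloca_with_align is given in bits):

     void *a = __builtin_alloca (n);                  // BUILT_IN_ALLOCA
     void *b = __builtin_alloca_with_align (n, 256);  // 32-byte alignment  */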
5351 /* Expand EXP, a call to __asan_allocas_unpoison. Add
5352 virtual_stack_dynamic_rtx - stack_pointer_rtx (the STACK_DYNAMIC_OFFSET
5353 value) to the call's second argument. See the comment above the
5354 handle_builtin_stack_restore function for motivation. */
5356 static rtx
5357 expand_asan_emit_allocas_unpoison (tree exp)
5359 tree arg0 = CALL_EXPR_ARG (exp, 0);
5360 tree arg1 = CALL_EXPR_ARG (exp, 1);
5361 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5362 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5363 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5364 stack_pointer_rtx, NULL_RTX, 0,
5365 OPTAB_LIB_WIDEN);
5366 off = convert_modes (ptr_mode, Pmode, off, 0);
5367 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5368 OPTAB_LIB_WIDEN);
5369 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5370 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5371 top, ptr_mode, bot, ptr_mode);
5372 return ret;
5375 /* Expand a call to a bswap builtin in EXP.
5376 Return NULL_RTX if a normal call should be emitted rather than expanding the
5377 function in-line. If convenient, the result should be placed in TARGET.
5378 SUBTARGET may be used as the target for computing one of EXP's operands. */
5380 static rtx
5381 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5382 rtx subtarget)
5384 tree arg;
5385 rtx op0;
5387 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5388 return NULL_RTX;
5390 arg = CALL_EXPR_ARG (exp, 0);
5391 op0 = expand_expr (arg,
5392 subtarget && GET_MODE (subtarget) == target_mode
5393 ? subtarget : NULL_RTX,
5394 target_mode, EXPAND_NORMAL);
5395 if (GET_MODE (op0) != target_mode)
5396 op0 = convert_to_mode (target_mode, op0, 1);
5398 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5400 gcc_assert (target);
5402 return convert_to_mode (target_mode, target, 1);
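/* For example, __builtin_bswap32 (0x12345678) == 0x78563412; the
   expansion goes through bswap_optab, so a target with a native
   byte-swap instruction needs no library call.  */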
5405 /* Expand a call to a unary builtin in EXP.
5406 Return NULL_RTX if a normal call should be emitted rather than expanding the
5407 function in-line. If convenient, the result should be placed in TARGET.
5408 SUBTARGET may be used as the target for computing one of EXP's operands. */
5410 static rtx
5411 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5412 rtx subtarget, optab op_optab)
5414 rtx op0;
5416 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5417 return NULL_RTX;
5419 /* Compute the argument. */
5420 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5421 (subtarget
5422 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5423 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5424 VOIDmode, EXPAND_NORMAL);
5425 /* Compute op, into TARGET if possible.
5426 Set TARGET to wherever the result comes back. */
5427 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5428 op_optab, op0, target, op_optab != clrsb_optab);
5429 gcc_assert (target);
5431 return convert_to_mode (target_mode, target, 0);
5434 /* Expand a call to __builtin_expect. We just return our argument
5435 as the builtin_expect semantics should have already been handled by the
5436 tree branch prediction pass. */
5438 static rtx
5439 expand_builtin_expect (tree exp, rtx target)
5441 tree arg;
5443 if (call_expr_nargs (exp) < 2)
5444 return const0_rtx;
5445 arg = CALL_EXPR_ARG (exp, 0);
5447 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5448 /* When guessing was done, the hints should be already stripped away. */
5449 gcc_assert (!flag_guess_branch_prob
5450 || optimize == 0 || seen_error ());
5451 return target;
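/* Usage sketch: the hint value has already been consumed by branch
   prediction, so only the first argument survives to expansion.

     if (__builtin_expect (err != 0, 0))   // err != 0 deemed unlikely
       handle_error (err);                 // handle_error is a placeholder  */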
5454 /* Expand a call to __builtin_expect_with_probability. We just return our
5455 argument as the builtin_expect semantics should have already been handled
5456 by the tree branch prediction pass. */
5458 static rtx
5459 expand_builtin_expect_with_probability (tree exp, rtx target)
5461 tree arg;
5463 if (call_expr_nargs (exp) < 3)
5464 return const0_rtx;
5465 arg = CALL_EXPR_ARG (exp, 0);
5467 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5468 /* When guessing was done, the hints should be already stripped away. */
5469 gcc_assert (!flag_guess_branch_prob
5470 || optimize == 0 || seen_error ());
5471 return target;
5475 /* Expand a call to __builtin_assume_aligned. We just return our first
5476 argument as the builtin_assume_aligned semantics should have already been
5477 handled by CCP. */
5479 static rtx
5480 expand_builtin_assume_aligned (tree exp, rtx target)
5482 if (call_expr_nargs (exp) < 2)
5483 return const0_rtx;
5484 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5485 EXPAND_NORMAL);
5486 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5487 && (call_expr_nargs (exp) < 3
5488 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5489 return target;
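/* Usage sketch: the alignment fact was already exploited by CCP, so the
   expansion above simply returns the pointer argument.

     float *p = __builtin_assume_aligned (q, 64);   // q is 64-byte aligned  */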
5492 void
5493 expand_builtin_trap (void)
5495 if (targetm.have_trap ())
5497 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5498 /* For trap insns when not accumulating outgoing args force
5499 REG_ARGS_SIZE note to prevent crossjumping of calls with
5500 different args sizes. */
5501 if (!ACCUMULATE_OUTGOING_ARGS)
5502 add_args_size_note (insn, stack_pointer_delta);
5504 else
5506 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5507 tree call_expr = build_call_expr (fn, 0);
5508 expand_call (call_expr, NULL_RTX, false);
5511 emit_barrier ();
5514 /* Expand a call to __builtin_unreachable. We do nothing except emit
5515 a barrier saying that control flow will not pass here.
5517 It is the responsibility of the program being compiled to ensure
5518 that control flow never reaches __builtin_unreachable. */
5519 static void
5520 expand_builtin_unreachable (void)
5522 emit_barrier ();
5525 /* Expand EXP, a call to fabs, fabsf or fabsl.
5526 Return NULL_RTX if a normal call should be emitted rather than expanding
5527 the function inline. If convenient, the result should be placed
5528 in TARGET. SUBTARGET may be used as the target for computing
5529 the operand. */
5531 static rtx
5532 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5534 machine_mode mode;
5535 tree arg;
5536 rtx op0;
5538 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5539 return NULL_RTX;
5541 arg = CALL_EXPR_ARG (exp, 0);
5542 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5543 mode = TYPE_MODE (TREE_TYPE (arg));
5544 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5545 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5548 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5549 Return NULL if a normal call should be emitted rather than expanding the
5550 function inline. If convenient, the result should be placed in TARGET.
5551 SUBTARGET may be used as the target for computing the operand. */
5553 static rtx
5554 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5556 rtx op0, op1;
5557 tree arg;
5559 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5560 return NULL_RTX;
5562 arg = CALL_EXPR_ARG (exp, 0);
5563 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5565 arg = CALL_EXPR_ARG (exp, 1);
5566 op1 = expand_normal (arg);
5568 return expand_copysign (op0, op1, target);
5571 /* Expand a call to __builtin___clear_cache. */
5573 static rtx
5574 expand_builtin___clear_cache (tree exp)
5576 if (!targetm.code_for_clear_cache)
5578 #ifdef CLEAR_INSN_CACHE
5579 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5580 does something. Just do the default expansion to a call to
5581 __clear_cache(). */
5582 return NULL_RTX;
5583 #else
5584 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5585 does nothing. There is no need to call it. Do nothing. */
5586 return const0_rtx;
5587 #endif /* CLEAR_INSN_CACHE */
5590 /* We have a "clear_cache" insn, and it will handle everything. */
5591 tree begin, end;
5592 rtx begin_rtx, end_rtx;
5594 /* We must not expand to a library call. If we did, any
5595 fallback library function in libgcc that might contain a call to
5596 __builtin___clear_cache() would recurse infinitely. */
5597 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5599 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5600 return const0_rtx;
5603 if (targetm.have_clear_cache ())
5605 struct expand_operand ops[2];
5607 begin = CALL_EXPR_ARG (exp, 0);
5608 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5610 end = CALL_EXPR_ARG (exp, 1);
5611 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5613 create_address_operand (&ops[0], begin_rtx);
5614 create_address_operand (&ops[1], end_rtx);
5615 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5616 return const0_rtx;
5618 return const0_rtx;
5621 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5623 static rtx
5624 round_trampoline_addr (rtx tramp)
5626 rtx temp, addend, mask;
5628 /* If we don't need too much alignment, we'll have been guaranteed
5629 proper alignment by get_trampoline_type. */
5630 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5631 return tramp;
5633 /* Round address up to desired boundary. */
5634 temp = gen_reg_rtx (Pmode);
5635 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5636 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5638 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5639 temp, 0, OPTAB_LIB_WIDEN);
5640 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5641 temp, 0, OPTAB_LIB_WIDEN);
5643 return tramp;
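/* The two binops above implement the usual (addr + align - 1) & -align
   round-up.  E.g. if TRAMPOLINE_ALIGNMENT were 64 bits, ADDEND would be
   7, MASK would be -8, and address 0x1003 would round up to 0x1008.  */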
5646 static rtx
5647 expand_builtin_init_trampoline (tree exp, bool onstack)
5649 tree t_tramp, t_func, t_chain;
5650 rtx m_tramp, r_tramp, r_chain, tmp;
5652 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5653 POINTER_TYPE, VOID_TYPE))
5654 return NULL_RTX;
5656 t_tramp = CALL_EXPR_ARG (exp, 0);
5657 t_func = CALL_EXPR_ARG (exp, 1);
5658 t_chain = CALL_EXPR_ARG (exp, 2);
5660 r_tramp = expand_normal (t_tramp);
5661 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5662 MEM_NOTRAP_P (m_tramp) = 1;
5664 /* If ONSTACK, the TRAMP argument should be the address of a field
5665 within the local function's FRAME decl. Either way, let's see if
5666 we can fill in the MEM_ATTRs for this memory. */
5667 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5668 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5670 /* Creator of a heap trampoline is responsible for making sure the
5671 address is aligned to at least STACK_BOUNDARY. Normally malloc
5672 will ensure this anyhow. */
5673 tmp = round_trampoline_addr (r_tramp);
5674 if (tmp != r_tramp)
5676 m_tramp = change_address (m_tramp, BLKmode, tmp);
5677 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5678 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5681 /* The FUNC argument should be the address of the nested function.
5682 Extract the actual function decl to pass to the hook. */
5683 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5684 t_func = TREE_OPERAND (t_func, 0);
5685 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5687 r_chain = expand_normal (t_chain);
5689 /* Generate insns to initialize the trampoline. */
5690 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5692 if (onstack)
5694 trampolines_created = 1;
5696 if (targetm.calls.custom_function_descriptors != 0)
5697 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5698 "trampoline generated for nested function %qD", t_func);
5701 return const0_rtx;
5704 static rtx
5705 expand_builtin_adjust_trampoline (tree exp)
5707 rtx tramp;
5709 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5710 return NULL_RTX;
5712 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5713 tramp = round_trampoline_addr (tramp);
5714 if (targetm.calls.trampoline_adjust_address)
5715 tramp = targetm.calls.trampoline_adjust_address (tramp);
5717 return tramp;
5720 /* Expand a call to the builtin descriptor initialization routine.
5721 A descriptor is made up of a pair of pointers: the static
5722 chain and the code entry, in that order. */
5724 static rtx
5725 expand_builtin_init_descriptor (tree exp)
5727 tree t_descr, t_func, t_chain;
5728 rtx m_descr, r_descr, r_func, r_chain;
5730 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5731 VOID_TYPE))
5732 return NULL_RTX;
5734 t_descr = CALL_EXPR_ARG (exp, 0);
5735 t_func = CALL_EXPR_ARG (exp, 1);
5736 t_chain = CALL_EXPR_ARG (exp, 2);
5738 r_descr = expand_normal (t_descr);
5739 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5740 MEM_NOTRAP_P (m_descr) = 1;
5742 r_func = expand_normal (t_func);
5743 r_chain = expand_normal (t_chain);
5745 /* Generate insns to initialize the descriptor. */
5746 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5747 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5748 POINTER_SIZE / BITS_PER_UNIT), r_func);
5750 return const0_rtx;
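/* Resulting in-memory layout, assuming a 64-bit target for illustration
   (POINTER_SIZE of 64):

     descriptor + 0:  static chain value
     descriptor + 8:  code entry point

   i.e. the static chain comes first, as documented above.  */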
5753 /* Expand a call to the builtin descriptor adjustment routine. */
5755 static rtx
5756 expand_builtin_adjust_descriptor (tree exp)
5758 rtx tramp;
5760 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5761 return NULL_RTX;
5763 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5765 /* Unalign the descriptor to allow runtime identification. */
5766 tramp = plus_constant (ptr_mode, tramp,
5767 targetm.calls.custom_function_descriptors);
5769 return force_operand (tramp, NULL_RTX);
5772 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5773 function. The function first checks whether the back end provides
5774 an insn to implement signbit for the respective mode. If not, it
5775 checks whether the floating point format of the value is such that
5776 the sign bit can be extracted. If that is not the case, error out.
5777 EXP is the expression that is a call to the builtin function; if
5778 convenient, the result should be placed in TARGET. */
5779 static rtx
5780 expand_builtin_signbit (tree exp, rtx target)
5782 const struct real_format *fmt;
5783 scalar_float_mode fmode;
5784 scalar_int_mode rmode, imode;
5785 tree arg;
5786 int word, bitpos;
5787 enum insn_code icode;
5788 rtx temp;
5789 location_t loc = EXPR_LOCATION (exp);
5791 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5792 return NULL_RTX;
5794 arg = CALL_EXPR_ARG (exp, 0);
5795 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5796 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5797 fmt = REAL_MODE_FORMAT (fmode);
5799 arg = builtin_save_expr (arg);
5801 /* Expand the argument yielding an RTX expression. */
5802 temp = expand_normal (arg);
5804 /* Check if the back end provides an insn that handles signbit for the
5805 argument's mode. */
5806 icode = optab_handler (signbit_optab, fmode);
5807 if (icode != CODE_FOR_nothing)
5809 rtx_insn *last = get_last_insn ();
5810 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5811 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5812 return target;
5813 delete_insns_since (last);
5816 /* For floating point formats without a sign bit, implement signbit
5817 as "ARG < 0.0". */
5818 bitpos = fmt->signbit_ro;
5819 if (bitpos < 0)
5821 /* But we can't do this if the format supports signed zero. */
5822 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5824 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5825 build_real (TREE_TYPE (arg), dconst0));
5826 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5829 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5831 imode = int_mode_for_mode (fmode).require ();
5832 temp = gen_lowpart (imode, temp);
5834 else
5836 imode = word_mode;
5837 /* Handle targets with different FP word orders. */
5838 if (FLOAT_WORDS_BIG_ENDIAN)
5839 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5840 else
5841 word = bitpos / BITS_PER_WORD;
5842 temp = operand_subword_force (temp, word, fmode);
5843 bitpos = bitpos % BITS_PER_WORD;
5846 /* Force the intermediate word_mode (or narrower) result into a
5847 register. This avoids attempting to create paradoxical SUBREGs
5848 of floating point modes below. */
5849 temp = force_reg (imode, temp);
5851 /* If the bitpos is within the "result mode" lowpart, the operation
5852 can be implemented with a single bitwise AND. Otherwise, we need
5853 a right shift and an AND. */
5855 if (bitpos < GET_MODE_BITSIZE (rmode))
5857 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5859 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5860 temp = gen_lowpart (rmode, temp);
5861 temp = expand_binop (rmode, and_optab, temp,
5862 immed_wide_int_const (mask, rmode),
5863 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5865 else
5867 /* Perform a logical right shift to place the signbit in the least
5868 significant bit, then truncate the result to the desired mode
5869 and mask just this bit. */
5870 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5871 temp = gen_lowpart (rmode, temp);
5872 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5873 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5876 return temp;
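/* Worked example, assuming IEEE binary32 and a 32-bit result mode:
   fmt->signbit_ro is 31, which is within the result mode's lowpart, so
   the single-AND path masks with 0x80000000; signbit (-0.0f) then
   yields that nonzero mask value.  */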
5879 /* Expand fork or exec calls. TARGET is the desired target of the
5880 call. EXP is the call. FN is the
5881 identifier of the actual function. IGNORE is nonzero if the
5882 value is to be ignored. */
5884 static rtx
5885 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5887 tree id, decl;
5888 tree call;
5890 /* If we are not profiling, just call the function. */
5891 if (!profile_arc_flag)
5892 return NULL_RTX;
5894 /* Otherwise call the wrapper. This should be equivalent for the rest of
5895 compiler, so the code does not diverge, and the wrapper may run the
5896 code necessary for keeping the profiling sane. */
5898 switch (DECL_FUNCTION_CODE (fn))
5900 case BUILT_IN_FORK:
5901 id = get_identifier ("__gcov_fork");
5902 break;
5904 case BUILT_IN_EXECL:
5905 id = get_identifier ("__gcov_execl");
5906 break;
5908 case BUILT_IN_EXECV:
5909 id = get_identifier ("__gcov_execv");
5910 break;
5912 case BUILT_IN_EXECLP:
5913 id = get_identifier ("__gcov_execlp");
5914 break;
5916 case BUILT_IN_EXECLE:
5917 id = get_identifier ("__gcov_execle");
5918 break;
5920 case BUILT_IN_EXECVP:
5921 id = get_identifier ("__gcov_execvp");
5922 break;
5924 case BUILT_IN_EXECVE:
5925 id = get_identifier ("__gcov_execve");
5926 break;
5928 default:
5929 gcc_unreachable ();
5932 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5933 FUNCTION_DECL, id, TREE_TYPE (fn));
5934 DECL_EXTERNAL (decl) = 1;
5935 TREE_PUBLIC (decl) = 1;
5936 DECL_ARTIFICIAL (decl) = 1;
5937 TREE_NOTHROW (decl) = 1;
5938 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5939 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5940 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5941 return expand_call (call, target, ignore);
5946 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5947 the pointer in these functions is void*, the tree optimizers may remove
5948 casts. The mode computed in expand_builtin isn't reliable either, due
5949 to __sync_bool_compare_and_swap.
5951 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5952 group of builtins. This gives us log2 of the mode size. */
5954 static inline machine_mode
5955 get_builtin_sync_mode (int fcode_diff)
5957 /* The size is not negotiable, so ask not to get BLKmode in return
5958 if the target indicates that a smaller size would be better. */
5959 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
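/* E.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   gives FCODE_DIFF 2, hence a mode of 8 << 2 = 32 bits (SImode on most
   targets).  */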
5962 /* Expand the memory expression LOC and return the appropriate memory operand
5963 for the builtin_sync operations. */
5965 static rtx
5966 get_builtin_sync_mem (tree loc, machine_mode mode)
5968 rtx addr, mem;
5969 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5970 ? TREE_TYPE (TREE_TYPE (loc))
5971 : TREE_TYPE (loc));
5972 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5974 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5975 addr = convert_memory_address (addr_mode, addr);
5977 /* Note that we explicitly do not want any alias information for this
5978 memory, so that we kill all other live memories. Otherwise we don't
5979 satisfy the full barrier semantics of the intrinsic. */
5980 mem = gen_rtx_MEM (mode, addr);
5982 set_mem_addr_space (mem, addr_space);
5984 mem = validize_mem (mem);
5986 /* The alignment needs to be at least as strict as that of the mode. */
5987 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5988 get_pointer_alignment (loc)));
5989 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5990 MEM_VOLATILE_P (mem) = 1;
5992 return mem;
5995 /* Make sure an argument is in the right mode.
5996 EXP is the tree argument.
5997 MODE is the mode it should be in. */
5999 static rtx
6000 expand_expr_force_mode (tree exp, machine_mode mode)
6002 rtx val;
6003 machine_mode old_mode;
6005 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6006 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6007 of CONST_INTs, where we know the old_mode only from the call argument. */
6009 old_mode = GET_MODE (val);
6010 if (old_mode == VOIDmode)
6011 old_mode = TYPE_MODE (TREE_TYPE (exp));
6012 val = convert_modes (mode, old_mode, val, 1);
6013 return val;
6017 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6018 EXP is the CALL_EXPR. CODE is the rtx code
6019 that corresponds to the arithmetic or logical operation from the name;
6020 an exception here is that NOT actually means NAND. TARGET is an optional
6021 place for us to store the results; AFTER is true if this is the
6022 fetch_and_xxx form. */
6024 static rtx
6025 expand_builtin_sync_operation (machine_mode mode, tree exp,
6026 enum rtx_code code, bool after,
6027 rtx target)
6029 rtx val, mem;
6030 location_t loc = EXPR_LOCATION (exp);
6032 if (code == NOT && warn_sync_nand)
6034 tree fndecl = get_callee_fndecl (exp);
6035 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6037 static bool warned_f_a_n, warned_n_a_f;
6039 switch (fcode)
6041 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6042 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6043 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6044 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6045 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6046 if (warned_f_a_n)
6047 break;
6049 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6050 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6051 warned_f_a_n = true;
6052 break;
6054 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6055 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6056 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6057 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6058 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6059 if (warned_n_a_f)
6060 break;
6062 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6063 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6064 warned_n_a_f = true;
6065 break;
6067 default:
6068 gcc_unreachable ();
6072 /* Expand the operands. */
6073 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6074 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6076 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6077 after);
6080 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6081 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6082 true if this is the boolean form. TARGET is a place for us to store the
6083 results; this is NOT optional if IS_BOOL is true. */
6085 static rtx
6086 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6087 bool is_bool, rtx target)
6089 rtx old_val, new_val, mem;
6090 rtx *pbool, *poval;
6092 /* Expand the operands. */
6093 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6094 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6095 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6097 pbool = poval = NULL;
6098 if (target != const0_rtx)
6100 if (is_bool)
6101 pbool = &target;
6102 else
6103 poval = &target;
6105 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6106 false, MEMMODEL_SYNC_SEQ_CST,
6107 MEMMODEL_SYNC_SEQ_CST))
6108 return NULL_RTX;
6110 return target;
6113 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6114 general form is actually an atomic exchange, and some targets only
6115 support a reduced form with the second argument being a constant 1.
6116 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6117 the results. */
6119 static rtx
6120 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6121 rtx target)
6123 rtx val, mem;
6125 /* Expand the operands. */
6126 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6127 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6129 return expand_sync_lock_test_and_set (target, mem, val);
6132 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6134 static void
6135 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6137 rtx mem;
6139 /* Expand the operands. */
6140 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6142 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6145 /* Given an integer representing an ``enum memmodel'', verify its
6146 correctness and return the memory model enum. */
6148 static enum memmodel
6149 get_memmodel (tree exp)
6151 rtx op;
6152 unsigned HOST_WIDE_INT val;
6153 location_t loc
6154 = expansion_point_location_if_in_system_header (input_location);
6156 /* If the parameter is not a constant, it's a run time value so we'll just
6157 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6158 if (TREE_CODE (exp) != INTEGER_CST)
6159 return MEMMODEL_SEQ_CST;
6161 op = expand_normal (exp);
6163 val = INTVAL (op);
6164 if (targetm.memmodel_check)
6165 val = targetm.memmodel_check (val);
6166 else if (val & ~MEMMODEL_MASK)
6168 warning_at (loc, OPT_Winvalid_memory_model,
6169 "unknown architecture specifier in memory model to builtin");
6170 return MEMMODEL_SEQ_CST;
6173 /* We should never see a user explicit SYNC memory model, so >= LAST works. */
6174 if (memmodel_base (val) >= MEMMODEL_LAST)
6176 warning_at (loc, OPT_Winvalid_memory_model,
6177 "invalid memory model argument to builtin");
6178 return MEMMODEL_SEQ_CST;
6181 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6182 be conservative and promote consume to acquire. */
6183 if (val == MEMMODEL_CONSUME)
6184 val = MEMMODEL_ACQUIRE;
6186 return (enum memmodel) val;
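/* Illustrative inputs: the constants __ATOMIC_RELAXED (0) through
   __ATOMIC_SEQ_CST (5) arrive as INTEGER_CSTs and map directly, while a
   run-time model such as

     __atomic_load_n (p, model_var);   // model_var is not a constant

   is conservatively treated as MEMMODEL_SEQ_CST, and consume is promoted
   to acquire per the Bugzilla 59448 workaround above.  */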
6189 /* Expand the __atomic_exchange intrinsic:
6190 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6191 EXP is the CALL_EXPR.
6192 TARGET is an optional place for us to store the results. */
6194 static rtx
6195 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6197 rtx val, mem;
6198 enum memmodel model;
6200 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6202 if (!flag_inline_atomics)
6203 return NULL_RTX;
6205 /* Expand the operands. */
6206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6207 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6209 return expand_atomic_exchange (target, mem, val, model);
6212 /* Expand the __atomic_compare_exchange intrinsic:
6213 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6214 TYPE desired, BOOL weak,
6215 enum memmodel success,
6216 enum memmodel failure)
6217 EXP is the CALL_EXPR.
6218 TARGET is an optional place for us to store the results. */
6220 static rtx
6221 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6222 rtx target)
6224 rtx expect, desired, mem, oldval;
6225 rtx_code_label *label;
6226 enum memmodel success, failure;
6227 tree weak;
6228 bool is_weak;
6229 location_t loc
6230 = expansion_point_location_if_in_system_header (input_location);
6232 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6233 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6235 if (failure > success)
6237 warning_at (loc, OPT_Winvalid_memory_model,
6238 "failure memory model cannot be stronger than success "
6239 "memory model for %<__atomic_compare_exchange%>");
6240 success = MEMMODEL_SEQ_CST;
6243 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6245 warning_at (loc, OPT_Winvalid_memory_model,
6246 "invalid failure memory model for "
6247 "%<__atomic_compare_exchange%>");
6248 failure = MEMMODEL_SEQ_CST;
6249 success = MEMMODEL_SEQ_CST;
6253 if (!flag_inline_atomics)
6254 return NULL_RTX;
6256 /* Expand the operands. */
6257 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6259 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6260 expect = convert_memory_address (Pmode, expect);
6261 expect = gen_rtx_MEM (mode, expect);
6262 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6264 weak = CALL_EXPR_ARG (exp, 3);
6265 is_weak = false;
6266 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6267 is_weak = true;
6269 if (target == const0_rtx)
6270 target = NULL;
6272 /* Lest the rtl backend create a race condition with an improper store
6273 to memory, always create a new pseudo for OLDVAL. */
6274 oldval = NULL;
6276 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6277 is_weak, success, failure))
6278 return NULL_RTX;
6280 /* Conditionally store back to EXPECT, lest we create a race condition
6281 with an improper store to memory. */
6282 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6283 the normal case where EXPECT is totally private, i.e. a register. At
6284 which point the store can be unconditional. */
6285 label = gen_label_rtx ();
6286 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6287 GET_MODE (target), 1, label);
6288 emit_move_insn (expect, oldval);
6289 emit_label (label);
6291 return target;
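/* The emitted sequence is, roughly (a pseudo-C sketch, with TARGET
   holding the boolean result and OLDVAL the prior memory contents):

     target = compare_and_swap (mem, *expect, desired, is_weak, ...);
     if (target)
       goto done;
     *expect = oldval;   // store back only on failure
   done:  */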
6294 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6295 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6296 call. The weak parameter must be dropped to match the expected parameter
6297 list and the expected argument changed from value to pointer to memory
6298 slot. */
6300 static void
6301 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6303 unsigned int z;
6304 vec<tree, va_gc> *vec;
6306 vec_alloc (vec, 5);
6307 vec->quick_push (gimple_call_arg (call, 0));
6308 tree expected = gimple_call_arg (call, 1);
6309 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6310 TREE_TYPE (expected));
6311 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6312 if (expd != x)
6313 emit_move_insn (x, expd);
6314 tree v = make_tree (TREE_TYPE (expected), x);
6315 vec->quick_push (build1 (ADDR_EXPR,
6316 build_pointer_type (TREE_TYPE (expected)), v));
6317 vec->quick_push (gimple_call_arg (call, 2));
6318 /* Skip the boolean weak parameter. */
6319 for (z = 4; z < 6; z++)
6320 vec->quick_push (gimple_call_arg (call, z));
6321 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6322 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6323 gcc_assert (bytes_log2 < 5);
6324 built_in_function fncode
6325 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6326 + bytes_log2);
6327 tree fndecl = builtin_decl_explicit (fncode);
6328 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6329 fndecl);
6330 tree exp = build_call_vec (boolean_type_node, fn, vec);
6331 tree lhs = gimple_call_lhs (call);
6332 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6333 if (lhs)
6335 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6336 if (GET_MODE (boolret) != mode)
6337 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6338 x = force_reg (mode, x);
6339 write_complex_part (target, boolret, true);
6340 write_complex_part (target, x, false);
6344 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6346 void
6347 expand_ifn_atomic_compare_exchange (gcall *call)
6349 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6350 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6351 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6352 rtx expect, desired, mem, oldval, boolret;
6353 enum memmodel success, failure;
6354 tree lhs;
6355 bool is_weak;
6356 location_t loc
6357 = expansion_point_location_if_in_system_header (gimple_location (call));
6359 success = get_memmodel (gimple_call_arg (call, 4));
6360 failure = get_memmodel (gimple_call_arg (call, 5));
6362 if (failure > success)
6364 warning_at (loc, OPT_Winvalid_memory_model,
6365 "failure memory model cannot be stronger than success "
6366 "memory model for %<__atomic_compare_exchange%>");
6367 success = MEMMODEL_SEQ_CST;
6370 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6372 warning_at (loc, OPT_Winvalid_memory_model,
6373 "invalid failure memory model for "
6374 "%<__atomic_compare_exchange%>");
6375 failure = MEMMODEL_SEQ_CST;
6376 success = MEMMODEL_SEQ_CST;
6379 if (!flag_inline_atomics)
6381 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6382 return;
6385 /* Expand the operands. */
6386 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6388 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6389 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6391 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6393 boolret = NULL;
6394 oldval = NULL;
6396 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6397 is_weak, success, failure))
6399 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6400 return;
6403 lhs = gimple_call_lhs (call);
6404 if (lhs)
6406 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6407 if (GET_MODE (boolret) != mode)
6408 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6409 write_complex_part (target, boolret, true);
6410 write_complex_part (target, oldval, false);
6414 /* Expand the __atomic_load intrinsic:
6415 TYPE __atomic_load (TYPE *object, enum memmodel)
6416 EXP is the CALL_EXPR.
6417 TARGET is an optional place for us to store the results. */
6419 static rtx
6420 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6422 rtx mem;
6423 enum memmodel model;
6425 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6426 if (is_mm_release (model) || is_mm_acq_rel (model))
6428 location_t loc
6429 = expansion_point_location_if_in_system_header (input_location);
6430 warning_at (loc, OPT_Winvalid_memory_model,
6431 "invalid memory model for %<__atomic_load%>");
6432 model = MEMMODEL_SEQ_CST;
6435 if (!flag_inline_atomics)
6436 return NULL_RTX;
6438 /* Expand the operand. */
6439 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6441 return expand_atomic_load (target, mem, model);
6445 /* Expand the __atomic_store intrinsic:
6446 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6447 EXP is the CALL_EXPR.
6448 TARGET is an optional place for us to store the results. */
6450 static rtx
6451 expand_builtin_atomic_store (machine_mode mode, tree exp)
6453 rtx mem, val;
6454 enum memmodel model;
6456 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6457 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6458 || is_mm_release (model)))
6460 location_t loc
6461 = expansion_point_location_if_in_system_header (input_location);
6462 warning_at (loc, OPT_Winvalid_memory_model,
6463 "invalid memory model for %<__atomic_store%>");
6464 model = MEMMODEL_SEQ_CST;
6467 if (!flag_inline_atomics)
6468 return NULL_RTX;
6470 /* Expand the operands. */
6471 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6472 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6474 return expand_atomic_store (mem, val, model, false);
6477 /* Expand the __atomic_fetch_XXX intrinsic:
6478 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6479 EXP is the CALL_EXPR.
6480 TARGET is an optional place for us to store the results.
6481 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (or NOT, meaning NAND).
6482 FETCH_AFTER is true if returning the result of the operation.
6483 FETCH_AFTER is false if returning the value before the operation.
6484 IGNORE is true if the result is not used.
6485 EXT_CALL is the correct builtin for an external call if this cannot be
6486 resolved to an instruction sequence. */
6488 static rtx
6489 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6490 enum rtx_code code, bool fetch_after,
6491 bool ignore, enum built_in_function ext_call)
6493 rtx val, mem, ret;
6494 enum memmodel model;
6495 tree fndecl;
6496 tree addr;
6498 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6500 /* Expand the operands. */
6501 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6502 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6504 /* Only try generating instructions if inlining is turned on. */
6505 if (flag_inline_atomics)
6507 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6508 if (ret)
6509 return ret;
6512 /* Return if a different routine isn't needed for the library call. */
6513 if (ext_call == BUILT_IN_NONE)
6514 return NULL_RTX;
6516 /* Change the call to the specified function. */
6517 fndecl = get_callee_fndecl (exp);
6518 addr = CALL_EXPR_FN (exp);
6519 STRIP_NOPS (addr);
6521 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6522 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6524 /* If we will emit code after the call, the call cannot be a tail call.
6525 If it is emitted as a tail call, a barrier is emitted after it, and
6526 then all trailing code is removed. */
6527 if (!ignore)
6528 CALL_EXPR_TAILCALL (exp) = 0;
6530 /* Expand the call here so we can emit trailing code. */
6531 ret = expand_call (exp, target, ignore);
6533 /* Replace the original function just in case it matters. */
6534 TREE_OPERAND (addr, 0) = fndecl;
6536 /* Then issue the arithmetic correction to return the right result. */
6537 if (!ignore)
6539 if (code == NOT)
6541 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6542 OPTAB_LIB_WIDEN);
6543 ret = expand_simple_unop (mode, NOT, ret, target, true);
6545 else
6546 ret = expand_simple_binop (mode, code, ret, val, target, true,
6547 OPTAB_LIB_WIDEN);
6549 return ret;
6552 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6554 void
6555 expand_ifn_atomic_bit_test_and (gcall *call)
6557 tree ptr = gimple_call_arg (call, 0);
6558 tree bit = gimple_call_arg (call, 1);
6559 tree flag = gimple_call_arg (call, 2);
6560 tree lhs = gimple_call_lhs (call);
6561 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6562 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6563 enum rtx_code code;
6564 optab optab;
6565 struct expand_operand ops[5];
6567 gcc_assert (flag_inline_atomics);
6569 if (gimple_call_num_args (call) == 4)
6570 model = get_memmodel (gimple_call_arg (call, 3));
6572 rtx mem = get_builtin_sync_mem (ptr, mode);
6573 rtx val = expand_expr_force_mode (bit, mode);
6575 switch (gimple_call_internal_fn (call))
6577 case IFN_ATOMIC_BIT_TEST_AND_SET:
6578 code = IOR;
6579 optab = atomic_bit_test_and_set_optab;
6580 break;
6581 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6582 code = XOR;
6583 optab = atomic_bit_test_and_complement_optab;
6584 break;
6585 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6586 code = AND;
6587 optab = atomic_bit_test_and_reset_optab;
6588 break;
6589 default:
6590 gcc_unreachable ();
6593 if (lhs == NULL_TREE)
6595 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6596 val, NULL_RTX, true, OPTAB_DIRECT);
6597 if (code == AND)
6598 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6599 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6600 return;
6603 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6604 enum insn_code icode = direct_optab_handler (optab, mode);
6605 gcc_assert (icode != CODE_FOR_nothing);
6606 create_output_operand (&ops[0], target, mode);
6607 create_fixed_operand (&ops[1], mem);
6608 create_convert_operand_to (&ops[2], val, mode, true);
6609 create_integer_operand (&ops[3], model);
6610 create_integer_operand (&ops[4], integer_onep (flag));
6611 if (maybe_expand_insn (icode, 5, ops))
6612 return;
6614 rtx bitval = val;
6615 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6616 val, NULL_RTX, true, OPTAB_DIRECT);
6617 rtx maskval = val;
6618 if (code == AND)
6619 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6620 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6621 code, model, false);
6622 if (integer_onep (flag))
6624 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6625 NULL_RTX, true, OPTAB_DIRECT);
6626 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6627 true, OPTAB_DIRECT);
6629 else
6630 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6631 OPTAB_DIRECT);
6632 if (result != target)
6633 emit_move_insn (target, result);
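/* This internal function is created earlier in the pipeline from idioms
   such as (illustrative):

     bool was_set = __atomic_fetch_or (&word, 1u << bit, model)
		    & (1u << bit);

   which becomes IFN_ATOMIC_BIT_TEST_AND_SET and, where the target
   provides atomic_bit_test_and_set_optab, a single atomic bit-test
   instruction instead of a full fetch_or.  */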
6636 /* Expand an atomic clear operation.
6637 void _atomic_clear (BOOL *obj, enum memmodel)
6638 EXP is the call expression. */
6640 static rtx
6641 expand_builtin_atomic_clear (tree exp)
6643 machine_mode mode;
6644 rtx mem, ret;
6645 enum memmodel model;
6647 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6648 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6649 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6651 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6653 location_t loc
6654 = expansion_point_location_if_in_system_header (input_location);
6655 warning_at (loc, OPT_Winvalid_memory_model,
6656 "invalid memory model for %<__atomic_store%>");
6657 model = MEMMODEL_SEQ_CST;
6660 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6661 Failing that, issue a plain store. The only way this can
6662 fail is if the bool type is larger than a word size. Unlikely, but
6663 handle it anyway for completeness. Assume a single threaded model since
6664 there is no atomic support in this case, and no barriers are required. */
6665 ret = expand_atomic_store (mem, const0_rtx, model, true);
6666 if (!ret)
6667 emit_move_insn (mem, const0_rtx);
6668 return const0_rtx;
6671 /* Expand an atomic test_and_set operation.
6672 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6673 EXP is the call expression. */
6675 static rtx
6676 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6678 rtx mem;
6679 enum memmodel model;
6680 machine_mode mode;
6682 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6683 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6684 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6686 return expand_atomic_test_and_set (target, mem, model);
6690 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6691 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6693 static tree
6694 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6696 int size;
6697 machine_mode mode;
6698 unsigned int mode_align, type_align;
6700 if (TREE_CODE (arg0) != INTEGER_CST)
6701 return NULL_TREE;
6703 /* We need a corresponding integer mode for the access to be lock-free. */
6704 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6705 if (!int_mode_for_size (size, 0).exists (&mode))
6706 return boolean_false_node;
6708 mode_align = GET_MODE_ALIGNMENT (mode);
6710 if (TREE_CODE (arg1) == INTEGER_CST)
6712 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6714 /* Either this argument is null, or it's a fake pointer encoding
6715 the alignment of the object. */
6716 val = least_bit_hwi (val);
6717 val *= BITS_PER_UNIT;
6719 if (val == 0 || mode_align < val)
6720 type_align = mode_align;
6721 else
6722 type_align = val;
6724 else
6726 tree ttype = TREE_TYPE (arg1);
6728 /* This function is usually invoked and folded immediately by the front
6729 end before anything else has a chance to look at it. The pointer
6730 parameter at this point is usually cast to a void *, so check for that
6731 and look past the cast. */
6732 if (CONVERT_EXPR_P (arg1)
6733 && POINTER_TYPE_P (ttype)
6734 && VOID_TYPE_P (TREE_TYPE (ttype))
6735 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6736 arg1 = TREE_OPERAND (arg1, 0);
6738 ttype = TREE_TYPE (arg1);
6739 gcc_assert (POINTER_TYPE_P (ttype));
6741 /* Get the underlying type of the object. */
6742 ttype = TREE_TYPE (ttype);
6743 type_align = TYPE_ALIGN (ttype);
6746 /* If the object has smaller alignment, the lock free routines cannot
6747 be used. */
6748 if (type_align < mode_align)
6749 return boolean_false_node;
6751 /* Check if a compare_and_swap pattern exists for the mode which represents
6752 the required size. The pattern is not allowed to fail, so the existence
6753 of the pattern indicates support is present. Also require that an
6754 atomic load exists for the required size. */
6755 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6756 return boolean_true_node;
6757 else
6758 return boolean_false_node;
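/* Illustrative folds, assuming a typical 64-bit target with a native
   compare-and-swap:

     __atomic_always_lock_free (4, 0);    // true: 32-bit CAS and load exist
     __atomic_always_lock_free (64, 0);   // false: no 64-byte CAS  */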
6761 /* Return true if the parameters to call EXP represent an object which will
6762 always generate lock free instructions. The first argument represents the
6763 size of the object, and the second parameter is a pointer to the object
6764 itself. If NULL is passed for the object, then the result is based on
6765 typical alignment for an object of the specified size. Otherwise return
6766 false. */
6768 static rtx
6769 expand_builtin_atomic_always_lock_free (tree exp)
6771 tree size;
6772 tree arg0 = CALL_EXPR_ARG (exp, 0);
6773 tree arg1 = CALL_EXPR_ARG (exp, 1);
6775 if (TREE_CODE (arg0) != INTEGER_CST)
6777 error ("non-constant argument 1 to __atomic_always_lock_free");
6778 return const0_rtx;
6781 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6782 if (size == boolean_true_node)
6783 return const1_rtx;
6784 return const0_rtx;
6787 /* Return boolean_true_node if it can be determined that object ARG1 of
6788 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
6790 static tree
6791 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6793 if (!flag_inline_atomics)
6794 return NULL_TREE;
6796 /* If it isn't always lock free, don't generate a result. */
6797 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6798 return boolean_true_node;
6800 return NULL_TREE;
6803 /* Return true if the parameters to call EXP represent an object which will
6804 always generate lock free instructions. The first argument represents the
6805 size of the object, and the second parameter is a pointer to the object
6806 itself. If NULL is passed for the object, then the result is based on
6807 typical alignment for an object of the specified size. Otherwise return
6808 NULL. */
6810 static rtx
6811 expand_builtin_atomic_is_lock_free (tree exp)
6813 tree size;
6814 tree arg0 = CALL_EXPR_ARG (exp, 0);
6815 tree arg1 = CALL_EXPR_ARG (exp, 1);
6817 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6819 error ("non-integer argument 1 to __atomic_is_lock_free");
6820 return NULL_RTX;
6823 if (!flag_inline_atomics)
6824 return NULL_RTX;
6826 /* If the value is known at compile time, return the RTX for it. */
6827 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6828 if (size == boolean_true_node)
6829 return const1_rtx;
6831 return NULL_RTX;
6834 /* Expand the __atomic_thread_fence intrinsic:
6835 void __atomic_thread_fence (enum memmodel)
6836 EXP is the CALL_EXPR. */
6838 static void
6839 expand_builtin_atomic_thread_fence (tree exp)
6841 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6842 expand_mem_thread_fence (model);
6845 /* Expand the __atomic_signal_fence intrinsic:
6846 void __atomic_signal_fence (enum memmodel)
6847 EXP is the CALL_EXPR. */
6849 static void
6850 expand_builtin_atomic_signal_fence (tree exp)
6852 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6853 expand_mem_signal_fence (model);
6856 /* Expand the __sync_synchronize intrinsic. */
6858 static void
6859 expand_builtin_sync_synchronize (void)
6861 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6864 static rtx
6865 expand_builtin_thread_pointer (tree exp, rtx target)
6867 enum insn_code icode;
6868 if (!validate_arglist (exp, VOID_TYPE))
6869 return const0_rtx;
6870 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6871 if (icode != CODE_FOR_nothing)
6873 struct expand_operand op;
6874 /* If the target is not suitable then create a new target. */
6875 if (target == NULL_RTX
6876 || !REG_P (target)
6877 || GET_MODE (target) != Pmode)
6878 target = gen_reg_rtx (Pmode);
6879 create_output_operand (&op, target, Pmode);
6880 expand_insn (icode, 1, &op);
6881 return target;
6883 error ("__builtin_thread_pointer is not supported on this target");
6884 return const0_rtx;
6887 static void
6888 expand_builtin_set_thread_pointer (tree exp)
6890 enum insn_code icode;
6891 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6892 return;
6893 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6894 if (icode != CODE_FOR_nothing)
6896 struct expand_operand op;
6897 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6898 Pmode, EXPAND_NORMAL);
6899 create_input_operand (&op, val, Pmode);
6900 expand_insn (icode, 1, &op);
6901 return;
6903 error ("__builtin_set_thread_pointer is not supported on this target");
6907 /* Emit code to restore the current value of the stack. */
6909 static void
6910 expand_stack_restore (tree var)
6912 rtx_insn *prev;
6913 rtx sa = expand_normal (var);
6915 sa = convert_memory_address (Pmode, sa);
6917 prev = get_last_insn ();
6918 emit_stack_restore (SAVE_BLOCK, sa);
6920 record_new_stack_level ();
6922 fixup_args_size_notes (prev, get_last_insn (), 0);
6925 /* Emit code to save the current value of the stack. */
6927 static rtx
6928 expand_stack_save (void)
6930 rtx ret = NULL_RTX;
6932 emit_stack_save (SAVE_BLOCK, &ret);
6933 return ret;
6936 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6938 static rtx
6939 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6941 const char *name;
6942 rtx fallback_retval;
6943 rtx_insn *(*gen_fn) (rtx, rtx);
6944 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6946 case BUILT_IN_GOACC_PARLEVEL_ID:
6947 name = "__builtin_goacc_parlevel_id";
6948 fallback_retval = const0_rtx;
6949 gen_fn = targetm.gen_oacc_dim_pos;
6950 break;
6951 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6952 name = "__builtin_goacc_parlevel_size";
6953 fallback_retval = const1_rtx;
6954 gen_fn = targetm.gen_oacc_dim_size;
6955 break;
6956 default:
6957 gcc_unreachable ();
6960 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6962 error ("%qs only supported in OpenACC code", name);
6963 return const0_rtx;
6966 tree arg = CALL_EXPR_ARG (exp, 0);
6967 if (TREE_CODE (arg) != INTEGER_CST)
6969 error ("non-constant argument 0 to %qs", name);
6970 return const0_rtx;
6973 int dim = TREE_INT_CST_LOW (arg);
6974 switch (dim)
6976 case GOMP_DIM_GANG:
6977 case GOMP_DIM_WORKER:
6978 case GOMP_DIM_VECTOR:
6979 break;
6980 default:
6981 error ("illegal argument 0 to %qs", name);
6982 return const0_rtx;
6985 if (ignore)
6986 return target;
6988 if (target == NULL_RTX)
6989 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6991 if (!targetm.have_oacc_dim_size ())
6993 emit_move_insn (target, fallback_retval);
6994 return target;
6997 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6998 emit_insn (gen_fn (reg, GEN_INT (dim)));
6999 if (reg != target)
7000 emit_move_insn (target, reg);
7002 return target;
7005 /* Expand a string compare operation using a sequence of char comparisons
7006 to get rid of the calling overhead, with result going to TARGET if
7007 that's convenient.
7009 VAR_STR is the variable string source;
7010 CONST_STR is the constant string source;
7011 LENGTH is the number of chars to compare;
7012 CONST_STR_N indicates which source string is the constant string;
7013 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7015 The call expands to (assume const_str_n is 2, i.e., arg2 is a constant string):
7017 target = (int) (unsigned char) var_str[0]
7018 - (int) (unsigned char) const_str[0];
7019 if (target != 0)
7020 goto ne_label;
7022 target = (int) (unsigned char) var_str[length - 2]
7023 - (int) (unsigned char) const_str[length - 2];
7024 if (target != 0)
7025 goto ne_label;
7026 target = (int) (unsigned char) var_str[length - 1]
7027 - (int) (unsigned char) const_str[length - 1];
7028 ne_label:
7031 static rtx
7032 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7033 unsigned HOST_WIDE_INT length,
7034 int const_str_n, machine_mode mode)
7036 HOST_WIDE_INT offset = 0;
7037 rtx var_rtx_array
7038 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7039 rtx var_rtx = NULL_RTX;
7040 rtx const_rtx = NULL_RTX;
7041 rtx result = target ? target : gen_reg_rtx (mode);
7042 rtx_code_label *ne_label = gen_label_rtx ();
7043 tree unit_type_node = unsigned_char_type_node;
7044 scalar_int_mode unit_mode
7045 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7047 start_sequence ();
7049 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7051 var_rtx
7052 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7053 const_rtx = c_readstr (const_str + offset, unit_mode);
7054 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7055 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7057 op0 = convert_modes (mode, unit_mode, op0, 1);
7058 op1 = convert_modes (mode, unit_mode, op1, 1);
7059 result = expand_simple_binop (mode, MINUS, op0, op1,
7060 result, 1, OPTAB_WIDEN);
7061 if (i < length - 1)
7062 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7063 mode, true, ne_label);
7064 offset += GET_MODE_SIZE (unit_mode);
7067 emit_label (ne_label);
7068 rtx_insn *insns = get_insns ();
7069 end_sequence ();
7070 emit_insn (insns);
7072 return result;
7075 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7076 going to TARGET if that's convenient.
7077 If the call is not inlined, return NULL_RTX. */
7078 static rtx
7079 inline_expand_builtin_string_cmp (tree exp, rtx target)
7081 tree fndecl = get_callee_fndecl (exp);
7082 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7083 unsigned HOST_WIDE_INT length = 0;
7084 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7086 /* Do NOT apply this inline expansion when optimizing for size or
7087 when the optimization level is below 2. */
7088 if (optimize < 2 || optimize_insn_for_size_p ())
7089 return NULL_RTX;
7091 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7092 || fcode == BUILT_IN_STRNCMP
7093 || fcode == BUILT_IN_MEMCMP);
7095 /* On a target where the type of the call (int) has the same or narrower
7096 precision than unsigned char, give up on the inline expansion. */
7097 if (TYPE_PRECISION (unsigned_char_type_node)
7098 >= TYPE_PRECISION (TREE_TYPE (exp)))
7099 return NULL_RTX;
7101 tree arg1 = CALL_EXPR_ARG (exp, 0);
7102 tree arg2 = CALL_EXPR_ARG (exp, 1);
7103 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7105 unsigned HOST_WIDE_INT len1 = 0;
7106 unsigned HOST_WIDE_INT len2 = 0;
7107 unsigned HOST_WIDE_INT len3 = 0;
7109 const char *src_str1 = c_getstr (arg1, &len1);
7110 const char *src_str2 = c_getstr (arg2, &len2);
7112 /* If neither string is a constant string, the call does not qualify. */
7113 if (!src_str1 && !src_str2)
7114 return NULL_RTX;
7116 /* For strncmp, if the length is not a constant, the call does not qualify. */
7117 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7118 return NULL_RTX;
7120 int const_str_n = 0;
7121 if (!len1)
7122 const_str_n = 2;
7123 else if (!len2)
7124 const_str_n = 1;
7125 else if (len2 > len1)
7126 const_str_n = 1;
7127 else
7128 const_str_n = 2;
7130 gcc_checking_assert (const_str_n > 0);
7131 length = (const_str_n == 1) ? len1 : len2;
7133 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7134 length = len3;
7136 /* If the length of the comparison is larger than the threshold,
7137 do nothing. */
7138 if (length > (unsigned HOST_WIDE_INT)
7139 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7140 return NULL_RTX;
7142 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7144 /* Now inline-expand the call. */
7145 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7146 (const_str_n == 1) ? src_str1 : src_str2, length,
7147 const_str_n, mode);
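/* Illustrative examples of the qualification rules above (hedged;
   assuming the default BUILTIN_STRING_CMP_INLINE_LENGTH threshold):

     strcmp (s, "ab")     -> inlined; arg2 is a constant string.
     strcmp (s, t)        -> not inlined; no constant string argument.
     strncmp (s, "ab", n) -> not inlined; the length N is not constant.  */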
7150 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7151 represents the size of the first argument to that call, or VOIDmode
7152 if the argument is a pointer. IGNORE will be true if the result
7153 isn't used. */
7154 static rtx
7155 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7156 bool ignore)
7158 rtx val, failsafe;
7159 unsigned nargs = call_expr_nargs (exp);
7161 tree arg0 = CALL_EXPR_ARG (exp, 0);
7163 if (mode == VOIDmode)
7165 mode = TYPE_MODE (TREE_TYPE (arg0));
7166 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7169 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7171 /* An optional second argument can be used as a failsafe value on
7172 some machines. If it isn't present, then the failsafe value is
7173 assumed to be 0. */
7174 if (nargs > 1)
7176 tree arg1 = CALL_EXPR_ARG (exp, 1);
7177 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7179 else
7180 failsafe = const0_rtx;
7182 /* If the result isn't used, the behavior is undefined. It would be
7183 nice to emit a warning here, but path splitting means this might
7184 happen with legitimate code. So simply drop the builtin
7185 expansion in that case; we've handled any side-effects above. */
7186 if (ignore)
7187 return const0_rtx;
7189 /* If we don't have a suitable target, create one to hold the result. */
7190 if (target == NULL || GET_MODE (target) != mode)
7191 target = gen_reg_rtx (mode);
7193 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7194 val = convert_modes (mode, VOIDmode, val, false);
7196 return targetm.speculation_safe_value (mode, target, val, failsafe);
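/* Illustrative source-level use of the builtin expanded above (hedged):

     if (idx < bound)
       val = array[__builtin_speculation_safe_value (idx)];

   On targets with a speculation barrier, IDX is forced to a safe value
   (0, or the optional second argument) under misspeculation.  */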
7199 /* Expand an expression EXP that calls a built-in function,
7200 with result going to TARGET if that's convenient
7201 (and in mode MODE if that's convenient).
7202 SUBTARGET may be used as the target for computing one of EXP's operands.
7203 IGNORE is nonzero if the value is to be ignored. */
7206 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7207 int ignore)
7209 tree fndecl = get_callee_fndecl (exp);
7210 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7211 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7212 int flags;
7214 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7215 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7217 /* When ASan is enabled, we don't want to expand some memory/string
7218 builtins and rely on libsanitizer's hooks. This allows us to avoid
7219 redundant checks and be sure that a possible overflow will be detected
7220 by ASan. */
7222 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7223 return expand_call (exp, target, ignore);
7225 /* When not optimizing, generate calls to library functions for a certain
7226 set of builtins. */
7227 if (!optimize
7228 && !called_as_built_in (fndecl)
7229 && fcode != BUILT_IN_FORK
7230 && fcode != BUILT_IN_EXECL
7231 && fcode != BUILT_IN_EXECV
7232 && fcode != BUILT_IN_EXECLP
7233 && fcode != BUILT_IN_EXECLE
7234 && fcode != BUILT_IN_EXECVP
7235 && fcode != BUILT_IN_EXECVE
7236 && !ALLOCA_FUNCTION_CODE_P (fcode)
7237 && fcode != BUILT_IN_FREE)
7238 return expand_call (exp, target, ignore);
7240 /* The built-in function expanders test for target == const0_rtx
7241 to determine whether the function's result will be ignored. */
7242 if (ignore)
7243 target = const0_rtx;
7245 /* If the result of a pure or const built-in function is ignored, and
7246 none of its arguments are volatile, we can avoid expanding the
7247 built-in call and just evaluate the arguments for side-effects. */
7248 if (target == const0_rtx
7249 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7250 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7252 bool volatilep = false;
7253 tree arg;
7254 call_expr_arg_iterator iter;
7256 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7257 if (TREE_THIS_VOLATILE (arg))
7259 volatilep = true;
7260 break;
7263 if (! volatilep)
7265 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7266 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7267 return const0_rtx;
7271 switch (fcode)
7273 CASE_FLT_FN (BUILT_IN_FABS):
7274 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7275 case BUILT_IN_FABSD32:
7276 case BUILT_IN_FABSD64:
7277 case BUILT_IN_FABSD128:
7278 target = expand_builtin_fabs (exp, target, subtarget);
7279 if (target)
7280 return target;
7281 break;
7283 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7284 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7285 target = expand_builtin_copysign (exp, target, subtarget);
7286 if (target)
7287 return target;
7288 break;
7290 /* Just do a normal library call if we were unable to fold
7291 the values. */
7292 CASE_FLT_FN (BUILT_IN_CABS):
7293 break;
7295 CASE_FLT_FN (BUILT_IN_FMA):
7296 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7297 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7298 if (target)
7299 return target;
7300 break;
7302 CASE_FLT_FN (BUILT_IN_ILOGB):
7303 if (! flag_unsafe_math_optimizations)
7304 break;
7305 gcc_fallthrough ();
7306 CASE_FLT_FN (BUILT_IN_ISINF):
7307 CASE_FLT_FN (BUILT_IN_FINITE):
7308 case BUILT_IN_ISFINITE:
7309 case BUILT_IN_ISNORMAL:
7310 target = expand_builtin_interclass_mathfn (exp, target);
7311 if (target)
7312 return target;
7313 break;
7315 CASE_FLT_FN (BUILT_IN_ICEIL):
7316 CASE_FLT_FN (BUILT_IN_LCEIL):
7317 CASE_FLT_FN (BUILT_IN_LLCEIL):
7318 CASE_FLT_FN (BUILT_IN_LFLOOR):
7319 CASE_FLT_FN (BUILT_IN_IFLOOR):
7320 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7321 target = expand_builtin_int_roundingfn (exp, target);
7322 if (target)
7323 return target;
7324 break;
7326 CASE_FLT_FN (BUILT_IN_IRINT):
7327 CASE_FLT_FN (BUILT_IN_LRINT):
7328 CASE_FLT_FN (BUILT_IN_LLRINT):
7329 CASE_FLT_FN (BUILT_IN_IROUND):
7330 CASE_FLT_FN (BUILT_IN_LROUND):
7331 CASE_FLT_FN (BUILT_IN_LLROUND):
7332 target = expand_builtin_int_roundingfn_2 (exp, target);
7333 if (target)
7334 return target;
7335 break;
7337 CASE_FLT_FN (BUILT_IN_POWI):
7338 target = expand_builtin_powi (exp, target);
7339 if (target)
7340 return target;
7341 break;
7343 CASE_FLT_FN (BUILT_IN_CEXPI):
7344 target = expand_builtin_cexpi (exp, target);
7345 gcc_assert (target);
7346 return target;
7348 CASE_FLT_FN (BUILT_IN_SIN):
7349 CASE_FLT_FN (BUILT_IN_COS):
7350 if (! flag_unsafe_math_optimizations)
7351 break;
7352 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7353 if (target)
7354 return target;
7355 break;
7357 CASE_FLT_FN (BUILT_IN_SINCOS):
7358 if (! flag_unsafe_math_optimizations)
7359 break;
7360 target = expand_builtin_sincos (exp);
7361 if (target)
7362 return target;
7363 break;
7365 case BUILT_IN_APPLY_ARGS:
7366 return expand_builtin_apply_args ();
7368 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7369 FUNCTION with a copy of the parameters described by
7370 ARGUMENTS, and ARGSIZE. It returns a block of memory
7371 allocated on the stack into which is stored all the registers
7372 that might possibly be used for returning the result of a
7373 function. ARGUMENTS is the value returned by
7374 __builtin_apply_args. ARGSIZE is the number of bytes of
7375 arguments that must be copied. ??? How should this value be
7376 computed? We'll also need a safe worst case value for varargs
7377 functions. */
7378 case BUILT_IN_APPLY:
7379 if (!validate_arglist (exp, POINTER_TYPE,
7380 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7381 && !validate_arglist (exp, REFERENCE_TYPE,
7382 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7383 return const0_rtx;
7384 else
7386 rtx ops[3];
7388 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7389 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7390 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7392 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7395 /* __builtin_return (RESULT) causes the function to return the
7396 value described by RESULT. RESULT is the address of the block of
7397 memory returned by __builtin_apply. */
7398 case BUILT_IN_RETURN:
7399 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7400 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7401 return const0_rtx;
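/* Taken together, __builtin_apply_args, __builtin_apply and
   __builtin_return let a wrapper forward its arguments; an illustrative
   sketch (64 is an assumed worst-case argument size in bytes):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) callee, args, 64);
     __builtin_return (ret);  */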
7403 case BUILT_IN_SAVEREGS:
7404 return expand_builtin_saveregs ();
7406 case BUILT_IN_VA_ARG_PACK:
7407 /* All valid uses of __builtin_va_arg_pack () are removed during
7408 inlining. */
7409 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7410 return const0_rtx;
7412 case BUILT_IN_VA_ARG_PACK_LEN:
7413 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7414 inlining. */
7415 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7416 return const0_rtx;
7418 /* Return the address of the first anonymous stack arg. */
7419 case BUILT_IN_NEXT_ARG:
7420 if (fold_builtin_next_arg (exp, false))
7421 return const0_rtx;
7422 return expand_builtin_next_arg ();
7424 case BUILT_IN_CLEAR_CACHE:
7425 target = expand_builtin___clear_cache (exp);
7426 if (target)
7427 return target;
7428 break;
7430 case BUILT_IN_CLASSIFY_TYPE:
7431 return expand_builtin_classify_type (exp);
7433 case BUILT_IN_CONSTANT_P:
7434 return const0_rtx;
7436 case BUILT_IN_FRAME_ADDRESS:
7437 case BUILT_IN_RETURN_ADDRESS:
7438 return expand_builtin_frame_address (fndecl, exp);
7440 /* Returns the address of the area where the structure is returned,
7441 or 0 otherwise. */
7442 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7443 if (call_expr_nargs (exp) != 0
7444 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7445 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7446 return const0_rtx;
7447 else
7448 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7450 CASE_BUILT_IN_ALLOCA:
7451 target = expand_builtin_alloca (exp);
7452 if (target)
7453 return target;
7454 break;
7456 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7457 return expand_asan_emit_allocas_unpoison (exp);
7459 case BUILT_IN_STACK_SAVE:
7460 return expand_stack_save ();
7462 case BUILT_IN_STACK_RESTORE:
7463 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7464 return const0_rtx;
7466 case BUILT_IN_BSWAP16:
7467 case BUILT_IN_BSWAP32:
7468 case BUILT_IN_BSWAP64:
7469 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7470 if (target)
7471 return target;
7472 break;
7474 CASE_INT_FN (BUILT_IN_FFS):
7475 target = expand_builtin_unop (target_mode, exp, target,
7476 subtarget, ffs_optab);
7477 if (target)
7478 return target;
7479 break;
7481 CASE_INT_FN (BUILT_IN_CLZ):
7482 target = expand_builtin_unop (target_mode, exp, target,
7483 subtarget, clz_optab);
7484 if (target)
7485 return target;
7486 break;
7488 CASE_INT_FN (BUILT_IN_CTZ):
7489 target = expand_builtin_unop (target_mode, exp, target,
7490 subtarget, ctz_optab);
7491 if (target)
7492 return target;
7493 break;
7495 CASE_INT_FN (BUILT_IN_CLRSB):
7496 target = expand_builtin_unop (target_mode, exp, target,
7497 subtarget, clrsb_optab);
7498 if (target)
7499 return target;
7500 break;
7502 CASE_INT_FN (BUILT_IN_POPCOUNT):
7503 target = expand_builtin_unop (target_mode, exp, target,
7504 subtarget, popcount_optab);
7505 if (target)
7506 return target;
7507 break;
7509 CASE_INT_FN (BUILT_IN_PARITY):
7510 target = expand_builtin_unop (target_mode, exp, target,
7511 subtarget, parity_optab);
7512 if (target)
7513 return target;
7514 break;
7516 case BUILT_IN_STRLEN:
7517 target = expand_builtin_strlen (exp, target, target_mode);
7518 if (target)
7519 return target;
7520 break;
7522 case BUILT_IN_STRNLEN:
7523 target = expand_builtin_strnlen (exp, target, target_mode);
7524 if (target)
7525 return target;
7526 break;
7528 case BUILT_IN_STRCAT:
7529 target = expand_builtin_strcat (exp, target);
7530 if (target)
7531 return target;
7532 break;
7534 case BUILT_IN_STRCPY:
7535 target = expand_builtin_strcpy (exp, target);
7536 if (target)
7537 return target;
7538 break;
7540 case BUILT_IN_STRNCAT:
7541 target = expand_builtin_strncat (exp, target);
7542 if (target)
7543 return target;
7544 break;
7546 case BUILT_IN_STRNCPY:
7547 target = expand_builtin_strncpy (exp, target);
7548 if (target)
7549 return target;
7550 break;
7552 case BUILT_IN_STPCPY:
7553 target = expand_builtin_stpcpy (exp, target, mode);
7554 if (target)
7555 return target;
7556 break;
7558 case BUILT_IN_STPNCPY:
7559 target = expand_builtin_stpncpy (exp, target);
7560 if (target)
7561 return target;
7562 break;
7564 case BUILT_IN_MEMCHR:
7565 target = expand_builtin_memchr (exp, target);
7566 if (target)
7567 return target;
7568 break;
7570 case BUILT_IN_MEMCPY:
7571 target = expand_builtin_memcpy (exp, target);
7572 if (target)
7573 return target;
7574 break;
7576 case BUILT_IN_MEMMOVE:
7577 target = expand_builtin_memmove (exp, target);
7578 if (target)
7579 return target;
7580 break;
7582 case BUILT_IN_MEMPCPY:
7583 target = expand_builtin_mempcpy (exp, target);
7584 if (target)
7585 return target;
7586 break;
7588 case BUILT_IN_MEMSET:
7589 target = expand_builtin_memset (exp, target, mode);
7590 if (target)
7591 return target;
7592 break;
7594 case BUILT_IN_BZERO:
7595 target = expand_builtin_bzero (exp);
7596 if (target)
7597 return target;
7598 break;
7600 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7601 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7602 when changing it to a strcmp call. */
7603 case BUILT_IN_STRCMP_EQ:
7604 target = expand_builtin_memcmp (exp, target, true);
7605 if (target)
7606 return target;
7608 /* Change this call back to a BUILT_IN_STRCMP. */
7609 TREE_OPERAND (exp, 1)
7610 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7612 /* Delete the last parameter. */
7613 unsigned int i;
7614 vec<tree, va_gc> *arg_vec;
7615 vec_alloc (arg_vec, 2);
7616 for (i = 0; i < 2; i++)
7617 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7618 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7619 /* FALLTHROUGH */
7621 case BUILT_IN_STRCMP:
7622 target = expand_builtin_strcmp (exp, target);
7623 if (target)
7624 return target;
7625 break;
7627 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7628 back to a BUILT_IN_STRNCMP. */
7629 case BUILT_IN_STRNCMP_EQ:
7630 target = expand_builtin_memcmp (exp, target, true);
7631 if (target)
7632 return target;
7634 /* Change it back to a BUILT_IN_STRNCMP. */
7635 TREE_OPERAND (exp, 1)
7636 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7637 /* FALLTHROUGH */
7639 case BUILT_IN_STRNCMP:
7640 target = expand_builtin_strncmp (exp, target, mode);
7641 if (target)
7642 return target;
7643 break;
7645 case BUILT_IN_BCMP:
7646 case BUILT_IN_MEMCMP:
7647 case BUILT_IN_MEMCMP_EQ:
7648 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7649 if (target)
7650 return target;
7651 if (fcode == BUILT_IN_MEMCMP_EQ)
7653 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7654 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7656 break;
7658 case BUILT_IN_SETJMP:
7659 /* This should have been lowered to the builtins below. */
7660 gcc_unreachable ();
7662 case BUILT_IN_SETJMP_SETUP:
7663 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7664 and the receiver label. */
7665 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7667 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7668 VOIDmode, EXPAND_NORMAL);
7669 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7670 rtx_insn *label_r = label_rtx (label);
7672 /* This is copied from the handling of non-local gotos. */
7673 expand_builtin_setjmp_setup (buf_addr, label_r);
7674 nonlocal_goto_handler_labels
7675 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7676 nonlocal_goto_handler_labels);
7677 /* ??? Do not let expand_label treat us as such since we would
7678 not want to be both on the list of non-local labels and on
7679 the list of forced labels. */
7680 FORCED_LABEL (label) = 0;
7681 return const0_rtx;
7683 break;
7685 case BUILT_IN_SETJMP_RECEIVER:
7686 /* __builtin_setjmp_receiver is passed the receiver label. */
7687 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7689 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7690 rtx_insn *label_r = label_rtx (label);
7692 expand_builtin_setjmp_receiver (label_r);
7693 return const0_rtx;
7695 break;
7697 /* __builtin_longjmp is passed a pointer to an array of five words.
7698 It's similar to the C library longjmp function but works with
7699 __builtin_setjmp above. */
7700 case BUILT_IN_LONGJMP:
7701 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7703 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7704 VOIDmode, EXPAND_NORMAL);
7705 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7707 if (value != const1_rtx)
7709 error ("%<__builtin_longjmp%> second argument must be 1");
7710 return const0_rtx;
7713 expand_builtin_longjmp (buf_addr, value);
7714 return const0_rtx;
7716 break;
7718 case BUILT_IN_NONLOCAL_GOTO:
7719 target = expand_builtin_nonlocal_goto (exp);
7720 if (target)
7721 return target;
7722 break;
7724 /* This updates the setjmp buffer that is its argument with the value
7725 of the current stack pointer. */
7726 case BUILT_IN_UPDATE_SETJMP_BUF:
7727 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7729 rtx buf_addr
7730 = expand_normal (CALL_EXPR_ARG (exp, 0));
7732 expand_builtin_update_setjmp_buf (buf_addr);
7733 return const0_rtx;
7735 break;
7737 case BUILT_IN_TRAP:
7738 expand_builtin_trap ();
7739 return const0_rtx;
7741 case BUILT_IN_UNREACHABLE:
7742 expand_builtin_unreachable ();
7743 return const0_rtx;
7745 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7746 case BUILT_IN_SIGNBITD32:
7747 case BUILT_IN_SIGNBITD64:
7748 case BUILT_IN_SIGNBITD128:
7749 target = expand_builtin_signbit (exp, target);
7750 if (target)
7751 return target;
7752 break;
7754 /* Various hooks for the DWARF 2 __throw routine. */
7755 case BUILT_IN_UNWIND_INIT:
7756 expand_builtin_unwind_init ();
7757 return const0_rtx;
7758 case BUILT_IN_DWARF_CFA:
7759 return virtual_cfa_rtx;
7760 #ifdef DWARF2_UNWIND_INFO
7761 case BUILT_IN_DWARF_SP_COLUMN:
7762 return expand_builtin_dwarf_sp_column ();
7763 case BUILT_IN_INIT_DWARF_REG_SIZES:
7764 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7765 return const0_rtx;
7766 #endif
7767 case BUILT_IN_FROB_RETURN_ADDR:
7768 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7769 case BUILT_IN_EXTRACT_RETURN_ADDR:
7770 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7771 case BUILT_IN_EH_RETURN:
7772 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7773 CALL_EXPR_ARG (exp, 1));
7774 return const0_rtx;
7775 case BUILT_IN_EH_RETURN_DATA_REGNO:
7776 return expand_builtin_eh_return_data_regno (exp);
7777 case BUILT_IN_EXTEND_POINTER:
7778 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7779 case BUILT_IN_EH_POINTER:
7780 return expand_builtin_eh_pointer (exp);
7781 case BUILT_IN_EH_FILTER:
7782 return expand_builtin_eh_filter (exp);
7783 case BUILT_IN_EH_COPY_VALUES:
7784 return expand_builtin_eh_copy_values (exp);
7786 case BUILT_IN_VA_START:
7787 return expand_builtin_va_start (exp);
7788 case BUILT_IN_VA_END:
7789 return expand_builtin_va_end (exp);
7790 case BUILT_IN_VA_COPY:
7791 return expand_builtin_va_copy (exp);
7792 case BUILT_IN_EXPECT:
7793 return expand_builtin_expect (exp, target);
7794 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7795 return expand_builtin_expect_with_probability (exp, target);
7796 case BUILT_IN_ASSUME_ALIGNED:
7797 return expand_builtin_assume_aligned (exp, target);
7798 case BUILT_IN_PREFETCH:
7799 expand_builtin_prefetch (exp);
7800 return const0_rtx;
7802 case BUILT_IN_INIT_TRAMPOLINE:
7803 return expand_builtin_init_trampoline (exp, true);
7804 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7805 return expand_builtin_init_trampoline (exp, false);
7806 case BUILT_IN_ADJUST_TRAMPOLINE:
7807 return expand_builtin_adjust_trampoline (exp);
7809 case BUILT_IN_INIT_DESCRIPTOR:
7810 return expand_builtin_init_descriptor (exp);
7811 case BUILT_IN_ADJUST_DESCRIPTOR:
7812 return expand_builtin_adjust_descriptor (exp);
7814 case BUILT_IN_FORK:
7815 case BUILT_IN_EXECL:
7816 case BUILT_IN_EXECV:
7817 case BUILT_IN_EXECLP:
7818 case BUILT_IN_EXECLE:
7819 case BUILT_IN_EXECVP:
7820 case BUILT_IN_EXECVE:
7821 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7822 if (target)
7823 return target;
7824 break;
7826 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7827 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7828 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7829 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7830 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7831 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7832 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7833 if (target)
7834 return target;
7835 break;
7837 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7838 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7839 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7840 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7841 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7842 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7843 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7844 if (target)
7845 return target;
7846 break;
7848 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7849 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7850 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7851 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7852 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7853 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7854 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7855 if (target)
7856 return target;
7857 break;
7859 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7860 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7861 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7862 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7863 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7864 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7865 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7866 if (target)
7867 return target;
7868 break;
7870 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7871 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7872 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7873 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7874 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7875 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7876 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7877 if (target)
7878 return target;
7879 break;
7881 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7882 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7883 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7884 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7885 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7886 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7887 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7888 if (target)
7889 return target;
7890 break;
7892 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7893 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7894 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7895 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7896 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7897 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7898 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7899 if (target)
7900 return target;
7901 break;
7903 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7904 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7905 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7906 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7907 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7908 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7909 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7910 if (target)
7911 return target;
7912 break;
7914 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7915 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7916 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7917 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7918 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7919 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7920 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7921 if (target)
7922 return target;
7923 break;
7925 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7926 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7927 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7928 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7929 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7930 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7931 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7932 if (target)
7933 return target;
7934 break;
7936 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7937 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7938 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7939 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7940 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7942 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7943 if (target)
7944 return target;
7945 break;
7947 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7948 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7949 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7950 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7951 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7952 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7953 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7954 if (target)
7955 return target;
7956 break;
7958 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7959 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7960 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7961 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7962 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7963 if (mode == VOIDmode)
7964 mode = TYPE_MODE (boolean_type_node);
7965 if (!target || !register_operand (target, mode))
7966 target = gen_reg_rtx (mode);
7968 mode = get_builtin_sync_mode
7969 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7970 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7971 if (target)
7972 return target;
7973 break;
7975 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7976 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7977 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7978 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7979 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7980 mode = get_builtin_sync_mode
7981 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7982 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7983 if (target)
7984 return target;
7985 break;
7987 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7988 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7989 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7990 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7991 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7992 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7993 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7994 if (target)
7995 return target;
7996 break;
7998 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7999 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8000 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8001 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8002 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8003 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8004 expand_builtin_sync_lock_release (mode, exp);
8005 return const0_rtx;
8007 case BUILT_IN_SYNC_SYNCHRONIZE:
8008 expand_builtin_sync_synchronize ();
8009 return const0_rtx;
8011 case BUILT_IN_ATOMIC_EXCHANGE_1:
8012 case BUILT_IN_ATOMIC_EXCHANGE_2:
8013 case BUILT_IN_ATOMIC_EXCHANGE_4:
8014 case BUILT_IN_ATOMIC_EXCHANGE_8:
8015 case BUILT_IN_ATOMIC_EXCHANGE_16:
8016 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8017 target = expand_builtin_atomic_exchange (mode, exp, target);
8018 if (target)
8019 return target;
8020 break;
8022 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8023 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8024 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8025 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8026 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8028 unsigned int nargs, z;
8029 vec<tree, va_gc> *vec;
8031 mode =
8032 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8033 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8034 if (target)
8035 return target;
8037 /* If this is turned into an external library call, the weak parameter
8038 must be dropped to match the expected parameter list. */
8039 nargs = call_expr_nargs (exp);
8040 vec_alloc (vec, nargs - 1);
8041 for (z = 0; z < 3; z++)
8042 vec->quick_push (CALL_EXPR_ARG (exp, z));
8043 /* Skip the boolean weak parameter. */
8044 for (z = 4; z < 6; z++)
8045 vec->quick_push (CALL_EXPR_ARG (exp, z));
8046 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8047 break;
8050 case BUILT_IN_ATOMIC_LOAD_1:
8051 case BUILT_IN_ATOMIC_LOAD_2:
8052 case BUILT_IN_ATOMIC_LOAD_4:
8053 case BUILT_IN_ATOMIC_LOAD_8:
8054 case BUILT_IN_ATOMIC_LOAD_16:
8055 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8056 target = expand_builtin_atomic_load (mode, exp, target);
8057 if (target)
8058 return target;
8059 break;
8061 case BUILT_IN_ATOMIC_STORE_1:
8062 case BUILT_IN_ATOMIC_STORE_2:
8063 case BUILT_IN_ATOMIC_STORE_4:
8064 case BUILT_IN_ATOMIC_STORE_8:
8065 case BUILT_IN_ATOMIC_STORE_16:
8066 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8067 target = expand_builtin_atomic_store (mode, exp);
8068 if (target)
8069 return const0_rtx;
8070 break;
8072 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8073 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8074 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8075 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8076 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8078 enum built_in_function lib;
8079 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8080 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8081 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8082 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8083 ignore, lib);
8084 if (target)
8085 return target;
8086 break;
8088 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8089 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8090 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8091 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8092 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8094 enum built_in_function lib;
8095 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8096 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8097 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8098 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8099 ignore, lib);
8100 if (target)
8101 return target;
8102 break;
8104 case BUILT_IN_ATOMIC_AND_FETCH_1:
8105 case BUILT_IN_ATOMIC_AND_FETCH_2:
8106 case BUILT_IN_ATOMIC_AND_FETCH_4:
8107 case BUILT_IN_ATOMIC_AND_FETCH_8:
8108 case BUILT_IN_ATOMIC_AND_FETCH_16:
8110 enum built_in_function lib;
8111 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8112 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8113 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8114 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8115 ignore, lib);
8116 if (target)
8117 return target;
8118 break;
8120 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8121 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8122 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8123 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8124 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8126 enum built_in_function lib;
8127 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8128 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8129 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8130 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8131 ignore, lib);
8132 if (target)
8133 return target;
8134 break;
8136 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8137 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8138 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8139 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8140 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8142 enum built_in_function lib;
8143 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8144 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8145 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8146 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8147 ignore, lib);
8148 if (target)
8149 return target;
8150 break;
8152 case BUILT_IN_ATOMIC_OR_FETCH_1:
8153 case BUILT_IN_ATOMIC_OR_FETCH_2:
8154 case BUILT_IN_ATOMIC_OR_FETCH_4:
8155 case BUILT_IN_ATOMIC_OR_FETCH_8:
8156 case BUILT_IN_ATOMIC_OR_FETCH_16:
8158 enum built_in_function lib;
8159 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8160 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8161 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8162 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8163 ignore, lib);
8164 if (target)
8165 return target;
8166 break;
8168 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8169 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8170 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8171 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8172 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8173 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8174 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8175 ignore, BUILT_IN_NONE);
8176 if (target)
8177 return target;
8178 break;
8180 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8181 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8182 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8183 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8184 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8185 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8186 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8187 ignore, BUILT_IN_NONE);
8188 if (target)
8189 return target;
8190 break;
8192 case BUILT_IN_ATOMIC_FETCH_AND_1:
8193 case BUILT_IN_ATOMIC_FETCH_AND_2:
8194 case BUILT_IN_ATOMIC_FETCH_AND_4:
8195 case BUILT_IN_ATOMIC_FETCH_AND_8:
8196 case BUILT_IN_ATOMIC_FETCH_AND_16:
8197 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8198 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8199 ignore, BUILT_IN_NONE);
8200 if (target)
8201 return target;
8202 break;
8204 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8205 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8206 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8207 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8208 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8209 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8210 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8211 ignore, BUILT_IN_NONE);
8212 if (target)
8213 return target;
8214 break;
8216 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8217 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8218 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8219 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8220 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8221 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8222 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8223 ignore, BUILT_IN_NONE);
8224 if (target)
8225 return target;
8226 break;
8228 case BUILT_IN_ATOMIC_FETCH_OR_1:
8229 case BUILT_IN_ATOMIC_FETCH_OR_2:
8230 case BUILT_IN_ATOMIC_FETCH_OR_4:
8231 case BUILT_IN_ATOMIC_FETCH_OR_8:
8232 case BUILT_IN_ATOMIC_FETCH_OR_16:
8233 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8234 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8235 ignore, BUILT_IN_NONE);
8236 if (target)
8237 return target;
8238 break;
8240 case BUILT_IN_ATOMIC_TEST_AND_SET:
8241 return expand_builtin_atomic_test_and_set (exp, target);
8243 case BUILT_IN_ATOMIC_CLEAR:
8244 return expand_builtin_atomic_clear (exp);
8246 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8247 return expand_builtin_atomic_always_lock_free (exp);
8249 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8250 target = expand_builtin_atomic_is_lock_free (exp);
8251 if (target)
8252 return target;
8253 break;
8255 case BUILT_IN_ATOMIC_THREAD_FENCE:
8256 expand_builtin_atomic_thread_fence (exp);
8257 return const0_rtx;
8259 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8260 expand_builtin_atomic_signal_fence (exp);
8261 return const0_rtx;
8263 case BUILT_IN_OBJECT_SIZE:
8264 return expand_builtin_object_size (exp);
8266 case BUILT_IN_MEMCPY_CHK:
8267 case BUILT_IN_MEMPCPY_CHK:
8268 case BUILT_IN_MEMMOVE_CHK:
8269 case BUILT_IN_MEMSET_CHK:
8270 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8271 if (target)
8272 return target;
8273 break;
8275 case BUILT_IN_STRCPY_CHK:
8276 case BUILT_IN_STPCPY_CHK:
8277 case BUILT_IN_STRNCPY_CHK:
8278 case BUILT_IN_STPNCPY_CHK:
8279 case BUILT_IN_STRCAT_CHK:
8280 case BUILT_IN_STRNCAT_CHK:
8281 case BUILT_IN_SNPRINTF_CHK:
8282 case BUILT_IN_VSNPRINTF_CHK:
8283 maybe_emit_chk_warning (exp, fcode);
8284 break;
8286 case BUILT_IN_SPRINTF_CHK:
8287 case BUILT_IN_VSPRINTF_CHK:
8288 maybe_emit_sprintf_chk_warning (exp, fcode);
8289 break;
8291 case BUILT_IN_FREE:
8292 if (warn_free_nonheap_object)
8293 maybe_emit_free_warning (exp);
8294 break;
8296 case BUILT_IN_THREAD_POINTER:
8297 return expand_builtin_thread_pointer (exp, target);
8299 case BUILT_IN_SET_THREAD_POINTER:
8300 expand_builtin_set_thread_pointer (exp);
8301 return const0_rtx;
8303 case BUILT_IN_ACC_ON_DEVICE:
8304 /* Do a library call if we failed to expand the builtin when
8305 folding. */
8306 break;
8308 case BUILT_IN_GOACC_PARLEVEL_ID:
8309 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8310 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8312 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8313 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8315 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8316 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8317 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8318 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8319 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8320 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8321 return expand_speculation_safe_value (mode, exp, target, ignore);
8323 default: /* Just do a library call if the builtin is unknown. */
8324 break;
8327 /* The switch statement above can drop through to cause the function
8328 to be called normally. */
8329 return expand_call (exp, target, ignore);
8332 /* Determine whether a tree node represents a call to a built-in
8333 function. If the tree T is a call to a built-in function with
8334 the right number of arguments of the appropriate types, return
8335 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8336 Otherwise the return value is END_BUILTINS. */
8338 enum built_in_function
8339 builtin_mathfn_code (const_tree t)
8341 const_tree fndecl, arg, parmlist;
8342 const_tree argtype, parmtype;
8343 const_call_expr_arg_iterator iter;
8345 if (TREE_CODE (t) != CALL_EXPR)
8346 return END_BUILTINS;
8348 fndecl = get_callee_fndecl (t);
8349 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8350 return END_BUILTINS;
8352 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8353 init_const_call_expr_arg_iterator (t, &iter);
8354 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8356 /* If a function doesn't take a variable number of arguments,
8357 the last element in the list will have type `void'. */
8358 parmtype = TREE_VALUE (parmlist);
8359 if (VOID_TYPE_P (parmtype))
8361 if (more_const_call_expr_args_p (&iter))
8362 return END_BUILTINS;
8363 return DECL_FUNCTION_CODE (fndecl);
8366 if (! more_const_call_expr_args_p (&iter))
8367 return END_BUILTINS;
8369 arg = next_const_call_expr_arg (&iter);
8370 argtype = TREE_TYPE (arg);
8372 if (SCALAR_FLOAT_TYPE_P (parmtype))
8374 if (! SCALAR_FLOAT_TYPE_P (argtype))
8375 return END_BUILTINS;
8377 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8379 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8380 return END_BUILTINS;
8382 else if (POINTER_TYPE_P (parmtype))
8384 if (! POINTER_TYPE_P (argtype))
8385 return END_BUILTINS;
8387 else if (INTEGRAL_TYPE_P (parmtype))
8389 if (! INTEGRAL_TYPE_P (argtype))
8390 return END_BUILTINS;
8392 else
8393 return END_BUILTINS;
8396 /* Variable-length argument list. */
8397 return DECL_FUNCTION_CODE (fndecl);
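/* Illustrative results of the checks above (hedged): a call tree for
   sqrt (2.0) yields BUILT_IN_SQRT, while an argument whose type class
   does not match the corresponding parameter (e.g. a pointer where a
   float is expected) yields END_BUILTINS.  */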
8400 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8401 evaluate to a constant. */
8403 static tree
8404 fold_builtin_constant_p (tree arg)
8406 /* We return 1 for a numeric type that's known to be a constant
8407 value at compile-time or for an aggregate type that's a
8408 literal constant. */
8409 STRIP_NOPS (arg);
8411 /* If we know this is a constant, return the constant one. */
8412 if (CONSTANT_CLASS_P (arg)
8413 || (TREE_CODE (arg) == CONSTRUCTOR
8414 && TREE_CONSTANT (arg)))
8415 return integer_one_node;
8416 if (TREE_CODE (arg) == ADDR_EXPR)
8418 tree op = TREE_OPERAND (arg, 0);
8419 if (TREE_CODE (op) == STRING_CST
8420 || (TREE_CODE (op) == ARRAY_REF
8421 && integer_zerop (TREE_OPERAND (op, 1))
8422 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8423 return integer_one_node;
8426 /* If this expression has side effects, show we don't know it to be a
8427 constant. Likewise if it's a pointer or aggregate type, since in
8428 those cases we only want literals, which are only optimized
8429 when generating RTL, not later.
8430 And finally, if we are compiling an initializer, not code, we
8431 need to return a definite result now; there's not going to be any
8432 more optimization done. */
8433 if (TREE_SIDE_EFFECTS (arg)
8434 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8435 || POINTER_TYPE_P (TREE_TYPE (arg))
8436 || cfun == 0
8437 || folding_initializer
8438 || force_folding_builtin_constant_p)
8439 return integer_zero_node;
8441 return NULL_TREE;
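/* Illustrative folds performed above (hedged):

     __builtin_constant_p (42)     -> 1 (a constant)
     __builtin_constant_p ("abc")  -> 1 (address of a string literal)
     __builtin_constant_p (ptr)    -> 0 (pointer type; only literals count)
     __builtin_constant_p (x)      -> NULL_TREE for an int variable,
                                      deferring the decision until RTL.  */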
8444 /* Create a call to builtin_expect or builtin_expect_with_probability
8445 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8446 The Fortran FE can also produce builtin_expect with PREDICTOR as its
8447 third argument; builtin_expect_with_probability instead uses its third
8448 argument as the PROBABILITY value. */
8450 static tree
8451 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8452 tree predictor, tree probability)
8454 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8456 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8457 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8458 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8459 ret_type = TREE_TYPE (TREE_TYPE (fn));
8460 pred_type = TREE_VALUE (arg_types);
8461 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8463 pred = fold_convert_loc (loc, pred_type, pred);
8464 expected = fold_convert_loc (loc, expected_type, expected);
8466 if (probability)
8467 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8468 else
8469 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8470 predictor);
8472 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8473 build_int_cst (ret_type, 0));
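/* For instance (illustrative), with PRED "a > 0", EXPECTED 1 and no
   probability, the expression built above is roughly

     __builtin_expect ((long) (a > 0), 1L) != 0

   i.e. the call wrapped back into a truthvalue.  */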
8476 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8477 NULL_TREE if no simplification is possible. */
8479 tree
8480 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8481 tree arg3)
8483 tree inner, fndecl, inner_arg0;
8484 enum tree_code code;
8486 /* Distribute the expected value over short-circuiting operators.
8487 See through the cast from truthvalue_type_node to long. */
8488 inner_arg0 = arg0;
8489 while (CONVERT_EXPR_P (inner_arg0)
8490 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8491 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8492 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8494 /* If this is a builtin_expect within a builtin_expect, keep the
8495 inner one. See through a comparison against a constant. It
8496 might have been added to create a truthvalue. */
8497 inner = inner_arg0;
8499 if (COMPARISON_CLASS_P (inner)
8500 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8501 inner = TREE_OPERAND (inner, 0);
8503 if (TREE_CODE (inner) == CALL_EXPR
8504 && (fndecl = get_callee_fndecl (inner))
8505 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8506 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8507 return arg0;
8509 inner = inner_arg0;
8510 code = TREE_CODE (inner);
8511 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8513 tree op0 = TREE_OPERAND (inner, 0);
8514 tree op1 = TREE_OPERAND (inner, 1);
8515 arg1 = save_expr (arg1);
8517 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8518 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8519 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8521 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8524 /* If the argument isn't invariant then there's nothing else we can do. */
8525 if (!TREE_CONSTANT (inner_arg0))
8526 return NULL_TREE;
8528 /* If we expect that a comparison against the argument will fold to
8529 a constant, return the constant. In practice, this means a true
8530 constant or the address of a non-weak symbol. */
8531 inner = inner_arg0;
8532 STRIP_NOPS (inner);
8533 if (TREE_CODE (inner) == ADDR_EXPR)
do
8537 inner = TREE_OPERAND (inner, 0);
8539 while (TREE_CODE (inner) == COMPONENT_REF
8540 || TREE_CODE (inner) == ARRAY_REF);
8541 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8542 return NULL_TREE;
8545 /* Otherwise, ARG0 already has the proper type for the return value. */
8546 return arg0;
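/* Illustrative effect of the distribution above (roughly):

     __builtin_expect (a && b, 1)

   becomes

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so each arm of the short-circuit carries the prediction.  */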
8549 /* Fold a call to __builtin_classify_type with argument ARG. */
8551 static tree
8552 fold_builtin_classify_type (tree arg)
8554 if (arg == 0)
8555 return build_int_cst (integer_type_node, no_type_class);
8557 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8560 /* Fold a call to __builtin_strlen with argument ARG. */
8562 static tree
8563 fold_builtin_strlen (location_t loc, tree type, tree arg)
8565 if (!validate_arg (arg, POINTER_TYPE))
8566 return NULL_TREE;
8567 else
8569 c_strlen_data data;
8570 memset (&data, 0, sizeof (c_strlen_data));
8571 tree len = c_strlen (arg, 0, &data);
8573 if (len)
8574 return fold_convert_loc (loc, type, len);
8576 if (!data.decl)
8577 c_strlen (arg, 1, &data);
8579 if (data.decl)
8581 if (EXPR_HAS_LOCATION (arg))
8582 loc = EXPR_LOCATION (arg);
8583 else if (loc == UNKNOWN_LOCATION)
8584 loc = input_location;
8585 warn_string_no_nul (loc, "strlen", arg, data.decl);
8588 return NULL_TREE;
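/* Illustrative folds (hedged): strlen ("hello") becomes the constant 5,
   while strlen (a) for an array A lacking a terminating NUL is left
   unfolded and diagnosed via warn_string_no_nul above.  */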
8592 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8594 static tree
8595 fold_builtin_inf (location_t loc, tree type, int warn)
8597 REAL_VALUE_TYPE real;
8599 /* __builtin_inff is intended to be usable to define INFINITY on all
8600 targets. If an infinity is not available, INFINITY expands "to a
8601 positive constant of type float that overflows at translation
8602 time", footnote "In this case, using INFINITY will violate the
8603 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8604 Thus we pedwarn to ensure this constraint violation is
8605 diagnosed. */
8606 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8607 pedwarn (loc, 0, "target format does not support infinity");
8609 real_inf (&real);
8610 return build_real (type, real);
8613 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8614 NULL_TREE if no simplification can be made. */
8616 static tree
8617 fold_builtin_sincos (location_t loc,
8618 tree arg0, tree arg1, tree arg2)
8620 tree type;
8621 tree fndecl, call = NULL_TREE;
8623 if (!validate_arg (arg0, REAL_TYPE)
8624 || !validate_arg (arg1, POINTER_TYPE)
8625 || !validate_arg (arg2, POINTER_TYPE))
8626 return NULL_TREE;
8628 type = TREE_TYPE (arg0);
8630 /* Calculate the result when the argument is a constant. */
8631 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8632 if (fn == END_BUILTINS)
8633 return NULL_TREE;
8635 /* Canonicalize sincos to cexpi. */
8636 if (TREE_CODE (arg0) == REAL_CST)
8638 tree complex_type = build_complex_type (type);
8639 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8641 if (!call)
8643 if (!targetm.libc_has_function (function_c99_math_complex)
8644 || !builtin_decl_implicit_p (fn))
8645 return NULL_TREE;
8646 fndecl = builtin_decl_explicit (fn);
8647 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8648 call = builtin_save_expr (call);
8651 tree ptype = build_pointer_type (type);
8652 arg1 = fold_convert (ptype, arg1);
8653 arg2 = fold_convert (ptype, arg2);
8654 return build2 (COMPOUND_EXPR, void_type_node,
8655 build2 (MODIFY_EXPR, void_type_node,
8656 build_fold_indirect_ref_loc (loc, arg1),
8657 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8658 build2 (MODIFY_EXPR, void_type_node,
8659 build_fold_indirect_ref_loc (loc, arg2),
8660 fold_build1_loc (loc, REALPART_EXPR, type, call)));
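/* Illustrative shape of the folded form built above (hedged):

     sincos (x, &s, &c)

   becomes roughly

     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   relying on a C99 complex math library to provide cexpi.  */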
8663 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8664 Return NULL_TREE if no simplification can be made. */
8666 static tree
8667 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8669 if (!validate_arg (arg1, POINTER_TYPE)
8670 || !validate_arg (arg2, POINTER_TYPE)
8671 || !validate_arg (len, INTEGER_TYPE))
8672 return NULL_TREE;
8674 /* If the LEN parameter is zero, return zero. */
8675 if (integer_zerop (len))
8676 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8677 arg1, arg2);
8679 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8680 if (operand_equal_p (arg1, arg2, 0))
8681 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8683 /* If the LEN parameter is one, return an expression corresponding to
8684 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8685 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8687 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8688 tree cst_uchar_ptr_node
8689 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8691 tree ind1
8692 = fold_convert_loc (loc, integer_type_node,
8693 build1 (INDIRECT_REF, cst_uchar_node,
8694 fold_convert_loc (loc,
8695 cst_uchar_ptr_node,
8696 arg1)));
8697 tree ind2
8698 = fold_convert_loc (loc, integer_type_node,
8699 build1 (INDIRECT_REF, cst_uchar_node,
8700 fold_convert_loc (loc,
8701 cst_uchar_ptr_node,
8702 arg2)));
8703 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8706 return NULL_TREE;
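/* Illustrative folds performed above:

     memcmp (p, q, 0) -> 0
     memcmp (p, p, n) -> 0
     memcmp (p, q, 1) -> (int) *(const unsigned char *) p
                         - (int) *(const unsigned char *) q  */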
8709 /* Fold a call to builtin isascii with argument ARG. */
8711 static tree
8712 fold_builtin_isascii (location_t loc, tree arg)
8714 if (!validate_arg (arg, INTEGER_TYPE))
8715 return NULL_TREE;
8716 else
8718 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8719 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8720 build_int_cst (integer_type_node,
8721 ~ (unsigned HOST_WIDE_INT) 0x7f));
8722 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8723 arg, integer_zero_node);
8727 /* Fold a call to builtin toascii with argument ARG. */
8729 static tree
8730 fold_builtin_toascii (location_t loc, tree arg)
8732 if (!validate_arg (arg, INTEGER_TYPE))
8733 return NULL_TREE;
8735 /* Transform toascii(c) -> (c & 0x7f). */
8736 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8737 build_int_cst (integer_type_node, 0x7f));
8740 /* Fold a call to builtin isdigit with argument ARG. */
8742 static tree
8743 fold_builtin_isdigit (location_t loc, tree arg)
8745 if (!validate_arg (arg, INTEGER_TYPE))
8746 return NULL_TREE;
8747 else
8749 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8750 /* According to the C standard, isdigit is unaffected by locale.
8751 However, it definitely is affected by the target character set. */
8752 unsigned HOST_WIDE_INT target_digit0
8753 = lang_hooks.to_target_charset ('0');
8755 if (target_digit0 == 0)
8756 return NULL_TREE;
8758 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8759 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8760 build_int_cst (unsigned_type_node, target_digit0));
8761 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8762 build_int_cst (unsigned_type_node, 9));
8766 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8768 static tree
8769 fold_builtin_fabs (location_t loc, tree arg, tree type)
8771 if (!validate_arg (arg, REAL_TYPE))
8772 return NULL_TREE;
8774 arg = fold_convert_loc (loc, type, arg);
8775 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8778 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8780 static tree
8781 fold_builtin_abs (location_t loc, tree arg, tree type)
8783 if (!validate_arg (arg, INTEGER_TYPE))
8784 return NULL_TREE;
8786 arg = fold_convert_loc (loc, type, arg);
8787 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8790 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8792 static tree
8793 fold_builtin_carg (location_t loc, tree arg, tree type)
8795 if (validate_arg (arg, COMPLEX_TYPE)
8796 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8798 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8800 if (atan2_fn)
8802 tree new_arg = builtin_save_expr (arg);
8803 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8804 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8805 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8809 return NULL_TREE;
8812 /* Fold a call to builtin frexp, we can assume the base is 2. */
8814 static tree
8815 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8817 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8818 return NULL_TREE;
8820 STRIP_NOPS (arg0);
8822 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8823 return NULL_TREE;
8825 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8827 /* Proceed if a valid pointer type was passed in. */
8828 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8830 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8831 tree frac, exp;
8833 switch (value->cl)
8835 case rvc_zero:
8836 /* For +-0, return (*exp = 0, +-0). */
8837 exp = integer_zero_node;
8838 frac = arg0;
8839 break;
8840 case rvc_nan:
8841 case rvc_inf:
8842 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8843 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8844 case rvc_normal:
8846 /* Since the frexp function always expects base 2, and in
8847 GCC normalized significands are already in the range
8848 [0.5, 1.0), we have exactly what frexp wants. */
8849 REAL_VALUE_TYPE frac_rvt = *value;
8850 SET_REAL_EXP (&frac_rvt, 0);
8851 frac = build_real (rettype, frac_rvt);
8852 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8854 break;
8855 default:
8856 gcc_unreachable ();
8859 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8860 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8861 TREE_SIDE_EFFECTS (arg1) = 1;
8862 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8865 return NULL_TREE;
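/* Illustrative fold: frexp (4.0, &e) becomes (*e = 3, 0.5), since
   4.0 == 0.5 * 2^3, and frexp (0.0, &e) becomes (*e = 0, 0.0).  */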
8868 /* Fold a call to builtin modf. */
8870 static tree
8871 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8873 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8874 return NULL_TREE;
8876 STRIP_NOPS (arg0);
8878 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8879 return NULL_TREE;
8881 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8883 /* Proceed if a valid pointer type was passed in. */
8884 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8886 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8887 REAL_VALUE_TYPE trunc, frac;
8889 switch (value->cl)
8891 case rvc_nan:
8892 case rvc_zero:
8893 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8894 trunc = frac = *value;
8895 break;
8896 case rvc_inf:
8897 /* For +-Inf, return (*arg1 = arg0, +-0). */
8898 frac = dconst0;
8899 frac.sign = value->sign;
8900 trunc = *value;
8901 break;
8902 case rvc_normal:
8903 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8904 real_trunc (&trunc, VOIDmode, value);
8905 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8906 /* If the original number was negative and already
8907 integral, then the fractional part is -0.0. */
8908 if (value->sign && frac.cl == rvc_zero)
8909 frac.sign = value->sign;
8910 break;
8913 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8914 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8915 build_real (rettype, trunc));
8916 TREE_SIDE_EFFECTS (arg1) = 1;
8917 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8918 build_real (rettype, frac));
8921 return NULL_TREE;
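/* Worked examples of the modf cases above, with constant arguments:

       double i;
       modf (2.5, &i);     folds to (*&i = 2.0, 0.5)
       modf (-3.0, &i);    folds to (*&i = -3.0, -0.0)

   the second illustrating the negative-and-already-integral rule
   that forces the sign of the zero fractional part. */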
8924 /* Given a location LOC, an interclass builtin function decl FNDECL
8925 and its single argument ARG, return a folded expression computing
8926 the same, or NULL_TREE if we either couldn't or didn't want to fold
8927 (the latter happens if there's an RTL instruction available). */
8929 static tree
8930 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8932 machine_mode mode;
8934 if (!validate_arg (arg, REAL_TYPE))
8935 return NULL_TREE;
8937 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8938 return NULL_TREE;
8940 mode = TYPE_MODE (TREE_TYPE (arg));
8942 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8944 /* If there is no optab, try generic code. */
8945 switch (DECL_FUNCTION_CODE (fndecl))
8947 tree result;
8949 CASE_FLT_FN (BUILT_IN_ISINF):
8951 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8952 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8953 tree type = TREE_TYPE (arg);
8954 REAL_VALUE_TYPE r;
8955 char buf[128];
8957 if (is_ibm_extended)
8959 /* NaN and Inf are encoded in the high-order double value
8960 only. The low-order value is not significant. */
8961 type = double_type_node;
8962 mode = DFmode;
8963 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8965 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8966 real_from_string (&r, buf);
8967 result = build_call_expr (isgr_fn, 2,
8968 fold_build1_loc (loc, ABS_EXPR, type, arg),
8969 build_real (type, r));
8970 return result;
8972 CASE_FLT_FN (BUILT_IN_FINITE):
8973 case BUILT_IN_ISFINITE:
8975 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8976 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8977 tree type = TREE_TYPE (arg);
8978 REAL_VALUE_TYPE r;
8979 char buf[128];
8981 if (is_ibm_extended)
8983 /* NaN and Inf are encoded in the high-order double value
8984 only. The low-order value is not significant. */
8985 type = double_type_node;
8986 mode = DFmode;
8987 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8989 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8990 real_from_string (&r, buf);
8991 result = build_call_expr (isle_fn, 2,
8992 fold_build1_loc (loc, ABS_EXPR, type, arg),
8993 build_real (type, r));
8994 /*result = fold_build2_loc (loc, UNGT_EXPR,
8995 TREE_TYPE (TREE_TYPE (fndecl)),
8996 fold_build1_loc (loc, ABS_EXPR, type, arg),
8997 build_real (type, r));
8998 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8999 TREE_TYPE (TREE_TYPE (fndecl)),
9000 result);*/
9001 return result;
9003 case BUILT_IN_ISNORMAL:
9005 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9006 islessequal(fabs(x),DBL_MAX). */
9007 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9008 tree type = TREE_TYPE (arg);
9009 tree orig_arg, max_exp, min_exp;
9010 machine_mode orig_mode = mode;
9011 REAL_VALUE_TYPE rmax, rmin;
9012 char buf[128];
9014 orig_arg = arg = builtin_save_expr (arg);
9015 if (is_ibm_extended)
9017 /* Use double to test the normal range of IBM extended
9018 precision. Emin for IBM extended precision is
9019 different to emin for IEEE double, being 53 higher
9020 since the low double exponent is at least 53 lower
9021 than the high double exponent. */
9022 type = double_type_node;
9023 mode = DFmode;
9024 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9026 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9028 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9029 real_from_string (&rmax, buf);
9030 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9031 real_from_string (&rmin, buf);
9032 max_exp = build_real (type, rmax);
9033 min_exp = build_real (type, rmin);
9035 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9036 if (is_ibm_extended)
9038 /* Testing the high end of the range is done just using
9039 the high double, using the same test as isfinite().
9040 For the subnormal end of the range we first test the
9041 high double, then if its magnitude is equal to the
9042 limit of 0x1p-969, we test whether the low double is
9043 non-zero and opposite sign to the high double. */
9044 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9045 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9046 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9047 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9048 arg, min_exp);
9049 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9050 complex_double_type_node, orig_arg);
9051 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9052 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9053 tree zero = build_real (type, dconst0);
9054 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9055 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9056 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9057 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9058 fold_build3 (COND_EXPR,
9059 integer_type_node,
9060 hilt, logt, lolt));
9061 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9062 eq_min, ok_lo);
9063 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9064 gt_min, eq_min);
9066 else
9068 tree const isge_fn
9069 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9070 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9072 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9073 max_exp, min_exp);
9074 return result;
9076 default:
9077 break;
9080 return NULL_TREE;
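/* A sketch of the generic isnormal code above for IEEE double (the
   exact limits come from REAL_MODE_FORMAT): with
   DBL_MAX == 0x1.fffffffffffffp+1023 and emin - 1 == -1022,

       isnormal (x)

   becomes roughly

       islessequal (fabs (sx), DBL_MAX)
         & isgreaterequal (fabs (sx), 0x1p-1022)

   where sx is x wrapped in a SAVE_EXPR so it is evaluated once. */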
9083 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9084 ARG is the argument for the call. */
9086 static tree
9087 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9089 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9091 if (!validate_arg (arg, REAL_TYPE))
9092 return NULL_TREE;
9094 switch (builtin_index)
9096 case BUILT_IN_ISINF:
9097 if (!HONOR_INFINITIES (arg))
9098 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9100 return NULL_TREE;
9102 case BUILT_IN_ISINF_SIGN:
9104 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9105 /* In a boolean context, GCC will fold the inner COND_EXPR to
9106 1. So e.g. "if (isinf_sign(x))" would be folded to just
9107 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9108 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9109 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9110 tree tmp = NULL_TREE;
9112 arg = builtin_save_expr (arg);
9114 if (signbit_fn && isinf_fn)
9116 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9117 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9119 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9120 signbit_call, integer_zero_node);
9121 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9122 isinf_call, integer_zero_node);
9124 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9125 integer_minus_one_node, integer_one_node);
9126 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9127 isinf_call, tmp,
9128 integer_zero_node);
9131 return tmp;
9134 case BUILT_IN_ISFINITE:
9135 if (!HONOR_NANS (arg)
9136 && !HONOR_INFINITIES (arg))
9137 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9139 return NULL_TREE;
9141 case BUILT_IN_ISNAN:
9142 if (!HONOR_NANS (arg))
9143 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9146 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9147 if (is_ibm_extended)
9149 /* NaN and Inf are encoded in the high-order double value
9150 only. The low-order value is not significant. */
9151 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9154 arg = builtin_save_expr (arg);
9155 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9157 default:
9158 gcc_unreachable ();
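/* The BUILT_IN_ISNAN case above relies on the IEEE rule that a NaN
   is the only value unordered with itself: isnan (x) folds to the
   self-comparison

       x UNORDERED x

   the tree form of !(x == x), and as a quiet comparison it raises
   no invalid-operation exception for quiet NaNs. */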
9162 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9163 This builtin will generate code to return the appropriate floating
9164 point classification depending on the value of the floating point
9165 number passed in. The possible return values must be supplied as
9166 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9167 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9168 one floating point argument, which is "type generic". */
9170 static tree
9171 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9173 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9174 arg, type, res, tmp;
9175 machine_mode mode;
9176 REAL_VALUE_TYPE r;
9177 char buf[128];
9179 /* Verify the required arguments in the original call. */
9180 if (nargs != 6
9181 || !validate_arg (args[0], INTEGER_TYPE)
9182 || !validate_arg (args[1], INTEGER_TYPE)
9183 || !validate_arg (args[2], INTEGER_TYPE)
9184 || !validate_arg (args[3], INTEGER_TYPE)
9185 || !validate_arg (args[4], INTEGER_TYPE)
9186 || !validate_arg (args[5], REAL_TYPE))
9187 return NULL_TREE;
9189 fp_nan = args[0];
9190 fp_infinite = args[1];
9191 fp_normal = args[2];
9192 fp_subnormal = args[3];
9193 fp_zero = args[4];
9194 arg = args[5];
9195 type = TREE_TYPE (arg);
9196 mode = TYPE_MODE (type);
9197 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9199 /* fpclassify(x) ->
9200 isnan(x) ? FP_NAN :
9201 (fabs(x) == Inf ? FP_INFINITE :
9202 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9203 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9205 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9206 build_real (type, dconst0));
9207 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9208 tmp, fp_zero, fp_subnormal);
9210 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9211 real_from_string (&r, buf);
9212 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9213 arg, build_real (type, r));
9214 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9216 if (HONOR_INFINITIES (mode))
9218 real_inf (&r);
9219 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9220 build_real (type, r));
9221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9222 fp_infinite, res);
9225 if (HONOR_NANS (mode))
9227 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9228 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9231 return res;
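/* A sketch of the tree built above for IEEE double, assuming NaNs
   and infinities are honored: with ax = SAVE_EXPR <fabs (x)>,

       fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL,
                   FP_ZERO, x)

   folds to

       ax ORDERED ax
         ? (ax == Inf ? FP_INFINITE
            : ax >= 0x1p-1022 ? FP_NORMAL
            : ax == 0.0 ? FP_ZERO : FP_SUBNORMAL)
         : FP_NAN

   built innermost-first, which is why the NaN test is added last. */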
9234 /* Fold a call to an unordered comparison function such as
9235 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9236 being called and ARG0 and ARG1 are the arguments for the call.
9237 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9238 the opposite of the desired result. UNORDERED_CODE is used
9239 for modes that can hold NaNs and ORDERED_CODE is used for
9240 the rest. */
9242 static tree
9243 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9244 enum tree_code unordered_code,
9245 enum tree_code ordered_code)
9247 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9248 enum tree_code code;
9249 tree type0, type1;
9250 enum tree_code code0, code1;
9251 tree cmp_type = NULL_TREE;
9253 type0 = TREE_TYPE (arg0);
9254 type1 = TREE_TYPE (arg1);
9256 code0 = TREE_CODE (type0);
9257 code1 = TREE_CODE (type1);
9259 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9260 /* Choose the wider of two real types. */
9261 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9262 ? type0 : type1;
9263 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9264 cmp_type = type0;
9265 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9266 cmp_type = type1;
9268 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9269 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9271 if (unordered_code == UNORDERED_EXPR)
9273 if (!HONOR_NANS (arg0))
9274 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9275 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9278 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9279 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9280 fold_build2_loc (loc, code, type, arg0, arg1));
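/* An example of the inversion performed above: for a mode that
   honors NaNs,

       isgreater (x, y)

   folds to

       !(x UNLE y)

   so a NaN operand makes the UNLE true and the result 0, without
   raising the invalid exception that a raw x > y would raise. */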
9283 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9284 arithmetic if it can never overflow, or into internal functions that
9285 return both the result of the arithmetic and an overflow boolean flag
9286 in a complex integer result, or into some other check for overflow.
9287 Similarly, fold __builtin_{add,sub,mul}_overflow_p to just the
9288 overflow-checking part of that. */
9290 static tree
9291 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9292 tree arg0, tree arg1, tree arg2)
9294 enum internal_fn ifn = IFN_LAST;
9295 /* The code of the expression corresponding to the type-generic
9296 built-in, or ERROR_MARK for the type-specific ones. */
9297 enum tree_code opcode = ERROR_MARK;
9298 bool ovf_only = false;
9300 switch (fcode)
9302 case BUILT_IN_ADD_OVERFLOW_P:
9303 ovf_only = true;
9304 /* FALLTHRU */
9305 case BUILT_IN_ADD_OVERFLOW:
9306 opcode = PLUS_EXPR;
9307 /* FALLTHRU */
9308 case BUILT_IN_SADD_OVERFLOW:
9309 case BUILT_IN_SADDL_OVERFLOW:
9310 case BUILT_IN_SADDLL_OVERFLOW:
9311 case BUILT_IN_UADD_OVERFLOW:
9312 case BUILT_IN_UADDL_OVERFLOW:
9313 case BUILT_IN_UADDLL_OVERFLOW:
9314 ifn = IFN_ADD_OVERFLOW;
9315 break;
9316 case BUILT_IN_SUB_OVERFLOW_P:
9317 ovf_only = true;
9318 /* FALLTHRU */
9319 case BUILT_IN_SUB_OVERFLOW:
9320 opcode = MINUS_EXPR;
9321 /* FALLTHRU */
9322 case BUILT_IN_SSUB_OVERFLOW:
9323 case BUILT_IN_SSUBL_OVERFLOW:
9324 case BUILT_IN_SSUBLL_OVERFLOW:
9325 case BUILT_IN_USUB_OVERFLOW:
9326 case BUILT_IN_USUBL_OVERFLOW:
9327 case BUILT_IN_USUBLL_OVERFLOW:
9328 ifn = IFN_SUB_OVERFLOW;
9329 break;
9330 case BUILT_IN_MUL_OVERFLOW_P:
9331 ovf_only = true;
9332 /* FALLTHRU */
9333 case BUILT_IN_MUL_OVERFLOW:
9334 opcode = MULT_EXPR;
9335 /* FALLTHRU */
9336 case BUILT_IN_SMUL_OVERFLOW:
9337 case BUILT_IN_SMULL_OVERFLOW:
9338 case BUILT_IN_SMULLL_OVERFLOW:
9339 case BUILT_IN_UMUL_OVERFLOW:
9340 case BUILT_IN_UMULL_OVERFLOW:
9341 case BUILT_IN_UMULLL_OVERFLOW:
9342 ifn = IFN_MUL_OVERFLOW;
9343 break;
9344 default:
9345 gcc_unreachable ();
9348 /* For the "generic" overloads, the first two arguments can have different
9349 types and the last argument determines the target type to use to check
9350 for overflow. The arguments of the other overloads all have the same
9351 type. */
9352 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9354 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9355 arguments are constant, attempt to fold the built-in call into a constant
9356 expression indicating whether or not it detected an overflow. */
9357 if (ovf_only
9358 && TREE_CODE (arg0) == INTEGER_CST
9359 && TREE_CODE (arg1) == INTEGER_CST)
9360 /* Perform the computation in the target type and check for overflow. */
9361 return omit_one_operand_loc (loc, boolean_type_node,
9362 arith_overflowed_p (opcode, type, arg0, arg1)
9363 ? boolean_true_node : boolean_false_node,
9364 arg2);
9366 tree ctype = build_complex_type (type);
9367 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9368 2, arg0, arg1);
9369 tree tgt = save_expr (call);
9370 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9371 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9372 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9374 if (ovf_only)
9375 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9377 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9378 tree store
9379 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9380 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
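/* A sketch of both shapes produced above. With variable operands,

       bool b = __builtin_add_overflow (x, y, &r);

   becomes, in terms of the internal function,

       c = .ADD_OVERFLOW (x, y);       complex integer pair
       *&r = REALPART_EXPR <c>;
       b = (bool) IMAGPART_EXPR <c>;

   while the _p variant with constant operands, e.g.

       __builtin_add_overflow_p (INT_MAX, 1, (int) 0)

   folds straight to boolean_true_node via arith_overflowed_p. */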
9383 /* Fold a call to __builtin_FILE to a constant string. */
9385 static inline tree
9386 fold_builtin_FILE (location_t loc)
9388 if (const char *fname = LOCATION_FILE (loc))
9390 /* The documentation says this builtin is equivalent to the preprocessor
9391 __FILE__ macro so it appears appropriate to use the same file prefix
9392 mappings. */
9393 fname = remap_macro_filename (fname);
9394 return build_string_literal (strlen (fname) + 1, fname);
9397 return build_string_literal (1, "");
9400 /* Fold a call to __builtin_FUNCTION to a constant string. */
9402 static inline tree
9403 fold_builtin_FUNCTION ()
9405 const char *name = "";
9407 if (current_function_decl)
9408 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9410 return build_string_literal (strlen (name) + 1, name);
9413 /* Fold a call to __builtin_LINE to an integer constant. */
9415 static inline tree
9416 fold_builtin_LINE (location_t loc, tree type)
9418 return build_int_cst (type, LOCATION_LINE (loc));
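/* Taken together, for a call written at line 42 of foo.c inside a
   function bar, these fold to the constants

       __builtin_FILE ()       -> "foo.c" (after remap_macro_filename)
       __builtin_FUNCTION ()   -> "bar"
       __builtin_LINE ()       -> 42

   where foo.c, bar and 42 are of course only illustrative. */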
9421 /* Fold a call to built-in function FNDECL with 0 arguments.
9422 This function returns NULL_TREE if no simplification was possible. */
9424 static tree
9425 fold_builtin_0 (location_t loc, tree fndecl)
9427 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9428 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9429 switch (fcode)
9431 case BUILT_IN_FILE:
9432 return fold_builtin_FILE (loc);
9434 case BUILT_IN_FUNCTION:
9435 return fold_builtin_FUNCTION ();
9437 case BUILT_IN_LINE:
9438 return fold_builtin_LINE (loc, type);
9440 CASE_FLT_FN (BUILT_IN_INF):
9441 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9442 case BUILT_IN_INFD32:
9443 case BUILT_IN_INFD64:
9444 case BUILT_IN_INFD128:
9445 return fold_builtin_inf (loc, type, true);
9447 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9449 return fold_builtin_inf (loc, type, false);
9451 case BUILT_IN_CLASSIFY_TYPE:
9452 return fold_builtin_classify_type (NULL_TREE);
9454 default:
9455 break;
9457 return NULL_TREE;
9460 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9461 This function returns NULL_TREE if no simplification was possible. */
9463 static tree
9464 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9466 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9467 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9469 if (TREE_CODE (arg0) == ERROR_MARK)
9470 return NULL_TREE;
9472 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9473 return ret;
9475 switch (fcode)
9477 case BUILT_IN_CONSTANT_P:
9479 tree val = fold_builtin_constant_p (arg0);
9481 /* Gimplification will pull the CALL_EXPR for the builtin out of
9482 an if condition. When not optimizing, we'll not CSE it back.
9483 To avoid link-error regressions, return false now. */
9484 if (!val && !optimize)
9485 val = integer_zero_node;
9487 return val;
9490 case BUILT_IN_CLASSIFY_TYPE:
9491 return fold_builtin_classify_type (arg0);
9493 case BUILT_IN_STRLEN:
9494 return fold_builtin_strlen (loc, type, arg0);
9496 CASE_FLT_FN (BUILT_IN_FABS):
9497 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9498 case BUILT_IN_FABSD32:
9499 case BUILT_IN_FABSD64:
9500 case BUILT_IN_FABSD128:
9501 return fold_builtin_fabs (loc, arg0, type);
9503 case BUILT_IN_ABS:
9504 case BUILT_IN_LABS:
9505 case BUILT_IN_LLABS:
9506 case BUILT_IN_IMAXABS:
9507 return fold_builtin_abs (loc, arg0, type);
9509 CASE_FLT_FN (BUILT_IN_CONJ):
9510 if (validate_arg (arg0, COMPLEX_TYPE)
9511 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9512 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9513 break;
9515 CASE_FLT_FN (BUILT_IN_CREAL):
9516 if (validate_arg (arg0, COMPLEX_TYPE)
9517 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9518 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9519 break;
9521 CASE_FLT_FN (BUILT_IN_CIMAG):
9522 if (validate_arg (arg0, COMPLEX_TYPE)
9523 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9524 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9525 break;
9527 CASE_FLT_FN (BUILT_IN_CARG):
9528 return fold_builtin_carg (loc, arg0, type);
9530 case BUILT_IN_ISASCII:
9531 return fold_builtin_isascii (loc, arg0);
9533 case BUILT_IN_TOASCII:
9534 return fold_builtin_toascii (loc, arg0);
9536 case BUILT_IN_ISDIGIT:
9537 return fold_builtin_isdigit (loc, arg0);
9539 CASE_FLT_FN (BUILT_IN_FINITE):
9540 case BUILT_IN_FINITED32:
9541 case BUILT_IN_FINITED64:
9542 case BUILT_IN_FINITED128:
9543 case BUILT_IN_ISFINITE:
9545 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9546 if (ret)
9547 return ret;
9548 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9551 CASE_FLT_FN (BUILT_IN_ISINF):
9552 case BUILT_IN_ISINFD32:
9553 case BUILT_IN_ISINFD64:
9554 case BUILT_IN_ISINFD128:
9556 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9557 if (ret)
9558 return ret;
9559 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9562 case BUILT_IN_ISNORMAL:
9563 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9565 case BUILT_IN_ISINF_SIGN:
9566 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9568 CASE_FLT_FN (BUILT_IN_ISNAN):
9569 case BUILT_IN_ISNAND32:
9570 case BUILT_IN_ISNAND64:
9571 case BUILT_IN_ISNAND128:
9572 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9574 case BUILT_IN_FREE:
9575 if (integer_zerop (arg0))
9576 return build_empty_stmt (loc);
9577 break;
9579 default:
9580 break;
9583 return NULL_TREE;
9587 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9588 This function returns NULL_TREE if no simplification was possible. */
9590 static tree
9591 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9593 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9596 if (TREE_CODE (arg0) == ERROR_MARK
9597 || TREE_CODE (arg1) == ERROR_MARK)
9598 return NULL_TREE;
9600 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9601 return ret;
9603 switch (fcode)
9605 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9606 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9607 if (validate_arg (arg0, REAL_TYPE)
9608 && validate_arg (arg1, POINTER_TYPE))
9609 return do_mpfr_lgamma_r (arg0, arg1, type);
9610 break;
9612 CASE_FLT_FN (BUILT_IN_FREXP):
9613 return fold_builtin_frexp (loc, arg0, arg1, type);
9615 CASE_FLT_FN (BUILT_IN_MODF):
9616 return fold_builtin_modf (loc, arg0, arg1, type);
9618 case BUILT_IN_STRSPN:
9619 return fold_builtin_strspn (loc, arg0, arg1);
9621 case BUILT_IN_STRCSPN:
9622 return fold_builtin_strcspn (loc, arg0, arg1);
9624 case BUILT_IN_STRPBRK:
9625 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9627 case BUILT_IN_EXPECT:
9628 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9630 case BUILT_IN_ISGREATER:
9631 return fold_builtin_unordered_cmp (loc, fndecl,
9632 arg0, arg1, UNLE_EXPR, LE_EXPR);
9633 case BUILT_IN_ISGREATEREQUAL:
9634 return fold_builtin_unordered_cmp (loc, fndecl,
9635 arg0, arg1, UNLT_EXPR, LT_EXPR);
9636 case BUILT_IN_ISLESS:
9637 return fold_builtin_unordered_cmp (loc, fndecl,
9638 arg0, arg1, UNGE_EXPR, GE_EXPR);
9639 case BUILT_IN_ISLESSEQUAL:
9640 return fold_builtin_unordered_cmp (loc, fndecl,
9641 arg0, arg1, UNGT_EXPR, GT_EXPR);
9642 case BUILT_IN_ISLESSGREATER:
9643 return fold_builtin_unordered_cmp (loc, fndecl,
9644 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9645 case BUILT_IN_ISUNORDERED:
9646 return fold_builtin_unordered_cmp (loc, fndecl,
9647 arg0, arg1, UNORDERED_EXPR,
9648 NOP_EXPR);
9650 /* We do the folding for va_start in the expander. */
9651 case BUILT_IN_VA_START:
9652 break;
9654 case BUILT_IN_OBJECT_SIZE:
9655 return fold_builtin_object_size (arg0, arg1);
9657 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9658 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9660 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9661 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9663 default:
9664 break;
9666 return NULL_TREE;
9669 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9670 and ARG2.
9671 This function returns NULL_TREE if no simplification was possible. */
9673 static tree
9674 fold_builtin_3 (location_t loc, tree fndecl,
9675 tree arg0, tree arg1, tree arg2)
9677 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9678 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9680 if (TREE_CODE (arg0) == ERROR_MARK
9681 || TREE_CODE (arg1) == ERROR_MARK
9682 || TREE_CODE (arg2) == ERROR_MARK)
9683 return NULL_TREE;
9685 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9686 arg0, arg1, arg2))
9687 return ret;
9689 switch (fcode)
9692 CASE_FLT_FN (BUILT_IN_SINCOS):
9693 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9695 CASE_FLT_FN (BUILT_IN_REMQUO):
9696 if (validate_arg (arg0, REAL_TYPE)
9697 && validate_arg (arg1, REAL_TYPE)
9698 && validate_arg (arg2, POINTER_TYPE))
9699 return do_mpfr_remquo (arg0, arg1, arg2);
9700 break;
9702 case BUILT_IN_MEMCMP:
9703 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9705 case BUILT_IN_EXPECT:
9706 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9708 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9709 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9711 case BUILT_IN_ADD_OVERFLOW:
9712 case BUILT_IN_SUB_OVERFLOW:
9713 case BUILT_IN_MUL_OVERFLOW:
9714 case BUILT_IN_ADD_OVERFLOW_P:
9715 case BUILT_IN_SUB_OVERFLOW_P:
9716 case BUILT_IN_MUL_OVERFLOW_P:
9717 case BUILT_IN_SADD_OVERFLOW:
9718 case BUILT_IN_SADDL_OVERFLOW:
9719 case BUILT_IN_SADDLL_OVERFLOW:
9720 case BUILT_IN_SSUB_OVERFLOW:
9721 case BUILT_IN_SSUBL_OVERFLOW:
9722 case BUILT_IN_SSUBLL_OVERFLOW:
9723 case BUILT_IN_SMUL_OVERFLOW:
9724 case BUILT_IN_SMULL_OVERFLOW:
9725 case BUILT_IN_SMULLL_OVERFLOW:
9726 case BUILT_IN_UADD_OVERFLOW:
9727 case BUILT_IN_UADDL_OVERFLOW:
9728 case BUILT_IN_UADDLL_OVERFLOW:
9729 case BUILT_IN_USUB_OVERFLOW:
9730 case BUILT_IN_USUBL_OVERFLOW:
9731 case BUILT_IN_USUBLL_OVERFLOW:
9732 case BUILT_IN_UMUL_OVERFLOW:
9733 case BUILT_IN_UMULL_OVERFLOW:
9734 case BUILT_IN_UMULLL_OVERFLOW:
9735 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9737 default:
9738 break;
9740 return NULL_TREE;
9743 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9744 arguments. IGNORE is true if the result of the
9745 function call is ignored. This function returns NULL_TREE if no
9746 simplification was possible. */
9748 tree
9749 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9751 tree ret = NULL_TREE;
9753 switch (nargs)
9755 case 0:
9756 ret = fold_builtin_0 (loc, fndecl);
9757 break;
9758 case 1:
9759 ret = fold_builtin_1 (loc, fndecl, args[0]);
9760 break;
9761 case 2:
9762 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9763 break;
9764 case 3:
9765 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9766 break;
9767 default:
9768 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9769 break;
9771 if (ret)
9773 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9774 SET_EXPR_LOCATION (ret, loc);
9775 return ret;
9777 return NULL_TREE;
9780 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9781 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9782 of arguments in ARGS to be omitted. OLDNARGS is the number of
9783 elements in ARGS. */
9785 static tree
9786 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9787 int skip, tree fndecl, int n, va_list newargs)
9789 int nargs = oldnargs - skip + n;
9790 tree *buffer;
9792 if (n > 0)
9794 int i, j;
9796 buffer = XALLOCAVEC (tree, nargs);
9797 for (i = 0; i < n; i++)
9798 buffer[i] = va_arg (newargs, tree);
9799 for (j = skip; j < oldnargs; j++, i++)
9800 buffer[i] = args[j];
9802 else
9803 buffer = args + skip;
9805 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9808 /* Return true if FNDECL shouldn't be folded right now.
9809 If a built-in function has an always_inline inline wrapper,
9810 defer folding it until after always_inline functions have
9811 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9812 might not be performed. */
9814 bool
9815 avoid_folding_inline_builtin (tree fndecl)
9817 return (DECL_DECLARED_INLINE_P (fndecl)
9818 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9819 && cfun
9820 && !cfun->always_inline_functions_inlined
9821 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
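/* The situation this guards against, sketched with a glibc-style
   fortify wrapper, which looks roughly like

       extern __inline __attribute__ ((__always_inline__)) char *
       strcpy (char *dest, const char *src)
       {
         return __builtin___strcpy_chk (dest, src,
                                        __builtin_object_size (dest, 1));
       }

   If a call to strcpy were folded as the plain built-in before the
   wrapper is inlined, the _FORTIFY_SOURCE check in its body would
   be skipped. */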
9824 /* A wrapper function for builtin folding that prevents warnings for
9825 "statement without effect" and the like, caused by removing the
9826 call node before the warning is generated. */
9828 tree
9829 fold_call_expr (location_t loc, tree exp, bool ignore)
9831 tree ret = NULL_TREE;
9832 tree fndecl = get_callee_fndecl (exp);
9833 if (fndecl && fndecl_built_in_p (fndecl)
9834 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9835 yet. Defer folding until we see all the arguments
9836 (after inlining). */
9837 && !CALL_EXPR_VA_ARG_PACK (exp))
9839 int nargs = call_expr_nargs (exp);
9841 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
9842 the last argument is __builtin_va_arg_pack (). Defer folding
9843 even in that case, until the arguments are finalized. */
9844 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9846 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9847 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9848 return NULL_TREE;
9851 if (avoid_folding_inline_builtin (fndecl))
9852 return NULL_TREE;
9854 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9855 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9856 CALL_EXPR_ARGP (exp), ignore);
9857 else
9859 tree *args = CALL_EXPR_ARGP (exp);
9860 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9861 if (ret)
9862 return ret;
9865 return NULL_TREE;
9868 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9869 N arguments are passed in the array ARGARRAY. Return a folded
9870 expression or NULL_TREE if no simplification was possible. */
9872 tree
9873 fold_builtin_call_array (location_t loc, tree,
9874 tree fn,
9875 int n,
9876 tree *argarray)
9878 if (TREE_CODE (fn) != ADDR_EXPR)
9879 return NULL_TREE;
9881 tree fndecl = TREE_OPERAND (fn, 0);
9882 if (TREE_CODE (fndecl) == FUNCTION_DECL
9883 && fndecl_built_in_p (fndecl))
9885 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9886 function are not finalized yet. Defer folding until they are. */
9887 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9889 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9890 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9891 return NULL_TREE;
9893 if (avoid_folding_inline_builtin (fndecl))
9894 return NULL_TREE;
9895 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9896 return targetm.fold_builtin (fndecl, n, argarray, false);
9897 else
9898 return fold_builtin_n (loc, fndecl, argarray, n, false);
9901 return NULL_TREE;
9904 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9905 along with N new arguments specified as the "..." parameters. SKIP
9906 is the number of arguments in EXP to be omitted. This function is used
9907 to do varargs-to-varargs transformations. */
9909 static tree
9910 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9912 va_list ap;
9913 tree t;
9915 va_start (ap, n);
9916 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9917 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9918 va_end (ap);
9920 return t;
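/* A hypothetical usage sketch (the real callers live elsewhere in
   this file): turning a 4-argument checking call EXP into its plain
   3-argument counterpart could be written

       rewrite_call_expr (loc, exp, 4, memcpy_decl, 3, dest, src, len);

   which keeps arguments 4..n of EXP (here: none) after the three
   new ones; memcpy_decl stands for a FUNCTION_DECL obtained from
   builtin_decl_explicit. */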
9923 /* Validate a single argument ARG against a tree code CODE representing
9924 a type. Return true when argument is valid. */
9926 static bool
9927 validate_arg (const_tree arg, enum tree_code code)
9929 if (!arg)
9930 return false;
9931 else if (code == POINTER_TYPE)
9932 return POINTER_TYPE_P (TREE_TYPE (arg));
9933 else if (code == INTEGER_TYPE)
9934 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9935 return code == TREE_CODE (TREE_TYPE (arg));
9938 /* This function validates the types of a function call argument list
9939 against a specified list of tree_codes. If the last specifier is a 0,
9940 that represents an ellipsis; otherwise the last specifier must be a
9941 VOID_TYPE.
9943 This is the GIMPLE version of validate_arglist. Eventually we want to
9944 completely convert builtins.c to work from GIMPLEs and the tree based
9945 validate_arglist will then be removed. */
9947 bool
9948 validate_gimple_arglist (const gcall *call, ...)
9950 enum tree_code code;
9951 bool res = false;
9952 va_list ap;
9953 const_tree arg;
9954 size_t i;
9956 va_start (ap, call);
9957 i = 0;
9961 code = (enum tree_code) va_arg (ap, int);
9962 switch (code)
9964 case 0:
9965 /* This signifies an ellipsis; any further arguments are all OK. */
9966 res = true;
9967 goto end;
9968 case VOID_TYPE:
9969 /* This signifies an endlink, if no arguments remain, return
9970 true, otherwise return false. */
9971 res = (i == gimple_call_num_args (call));
9972 goto end;
9973 default:
9974 /* If no parameters remain or the parameter's code does not
9975 match the specified code, return false. Otherwise continue
9976 checking any remaining arguments. */
9977 arg = gimple_call_arg (call, i++);
9978 if (!validate_arg (arg, code))
9979 goto end;
9980 break;
9983 while (1);
9985 /* We need gotos here since we can only have one va_end in a
9986 function. */
9987 end: ;
9988 va_end (ap);
9990 return res;
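/* Usage sketches: a checker for a fixed two-argument builtin is
   written

       validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   requiring exactly a real and a pointer argument, while a trailing
   0 instead of VOID_TYPE, as in

       validate_gimple_arglist (call, INTEGER_TYPE, 0)

   accepts one integer followed by anything, as for a varargs
   builtin. */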
9993 /* Default target-specific builtin expander that does nothing. */
9995 rtx
9996 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9997 rtx target ATTRIBUTE_UNUSED,
9998 rtx subtarget ATTRIBUTE_UNUSED,
9999 machine_mode mode ATTRIBUTE_UNUSED,
10000 int ignore ATTRIBUTE_UNUSED)
10002 return NULL_RTX;
10005 /* Returns true if EXP represents data that would potentially reside
10006 in a readonly section. */
10008 bool
10009 readonly_data_expr (tree exp)
10011 STRIP_NOPS (exp);
10013 if (TREE_CODE (exp) != ADDR_EXPR)
10014 return false;
10016 exp = get_base_address (TREE_OPERAND (exp, 0));
10017 if (!exp)
10018 return false;
10020 /* Make sure we call decl_readonly_section only for trees it
10021 can handle (since it returns true for everything it doesn't
10022 understand). */
10023 if (TREE_CODE (exp) == STRING_CST
10024 || TREE_CODE (exp) == CONSTRUCTOR
10025 || (VAR_P (exp) && TREE_STATIC (exp)))
10026 return decl_readonly_section (exp, 0);
10027 else
10028 return false;
10031 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10032 to the call, and TYPE is its return type.
10034 Return NULL_TREE if no simplification was possible, otherwise return the
10035 simplified form of the call as a tree.
10037 The simplified form may be a constant or other expression which
10038 computes the same value, but in a more efficient manner (including
10039 calls to other builtin functions).
10041 The call may contain arguments which need to be evaluated, but
10042 which are not useful to determine the result of the call. In
10043 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10044 COMPOUND_EXPR will be an argument which must be evaluated.
10045 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10046 COMPOUND_EXPR in the chain will contain the tree for the simplified
10047 form of the builtin function call. */
10049 static tree
10050 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10052 if (!validate_arg (s1, POINTER_TYPE)
10053 || !validate_arg (s2, POINTER_TYPE))
10054 return NULL_TREE;
10055 else
10057 tree fn;
10058 const char *p1, *p2;
10060 p2 = c_getstr (s2);
10061 if (p2 == NULL)
10062 return NULL_TREE;
10064 p1 = c_getstr (s1);
10065 if (p1 != NULL)
10067 const char *r = strpbrk (p1, p2);
10068 tree tem;
10070 if (r == NULL)
10071 return build_int_cst (TREE_TYPE (s1), 0);
10073 /* Return an offset into the constant string argument. */
10074 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10075 return fold_convert_loc (loc, type, tem);
10078 if (p2[0] == '\0')
10079 /* strpbrk(x, "") == NULL.
10080 Evaluate and ignore s1 in case it had side-effects. */
10081 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10083 if (p2[1] != '\0')
10084 return NULL_TREE; /* Really call strpbrk. */
10086 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10087 if (!fn)
10088 return NULL_TREE;
10090 /* New argument list transforming strpbrk(s1, s2) to
10091 strchr(s1, s2[0]). */
10092 return build_call_expr_loc (loc, fn, 2, s1,
10093 build_int_cst (integer_type_node, p2[0]));
10097 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10098 to the call.
10100 Return NULL_TREE if no simplification was possible, otherwise return the
10101 simplified form of the call as a tree.
10103 The simplified form may be a constant or other expression which
10104 computes the same value, but in a more efficient manner (including
10105 calls to other builtin functions).
10107 The call may contain arguments which need to be evaluated, but
10108 which are not useful to determine the result of the call. In
10109 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10110 COMPOUND_EXPR will be an argument which must be evaluated.
10111 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10112 COMPOUND_EXPR in the chain will contain the tree for the simplified
10113 form of the builtin function call. */
10115 static tree
10116 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10118 if (!validate_arg (s1, POINTER_TYPE)
10119 || !validate_arg (s2, POINTER_TYPE))
10120 return NULL_TREE;
10121 else
10123 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10125 /* If either argument is "", the result is 0. */
10126 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10127 /* Evaluate and ignore both arguments in case either one has
10128 side-effects. */
10129 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10130 s1, s2);
10131 return NULL_TREE;
10135 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10136 to the call.
10138 Return NULL_TREE if no simplification was possible, otherwise return the
10139 simplified form of the call as a tree.
10141 The simplified form may be a constant or other expression which
10142 computes the same value, but in a more efficient manner (including
10143 calls to other builtin functions).
10145 The call may contain arguments which need to be evaluated, but
10146 which are not useful to determine the result of the call. In
10147 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10148 COMPOUND_EXPR will be an argument which must be evaluated.
10149 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10150 COMPOUND_EXPR in the chain will contain the tree for the simplified
10151 form of the builtin function call. */
10153 static tree
10154 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10156 if (!validate_arg (s1, POINTER_TYPE)
10157 || !validate_arg (s2, POINTER_TYPE))
10158 return NULL_TREE;
10159 else
10161 /* If the first argument is "", the result is 0. */
10162 const char *p1 = c_getstr (s1);
10163 if (p1 && *p1 == '\0')
10165 /* Evaluate and ignore argument s2 in case it has
10166 side-effects. */
10167 return omit_one_operand_loc (loc, size_type_node,
10168 size_zero_node, s2);
10171 /* If the second argument is "", return __builtin_strlen(s1). */
10172 const char *p2 = c_getstr (s2);
10173 if (p2 && *p2 == '\0')
10175 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10177 /* If the replacement _DECL isn't initialized, don't do the
10178 transformation. */
10179 if (!fn)
10180 return NULL_TREE;
10182 return build_call_expr_loc (loc, fn, 1, s1);
10184 return NULL_TREE;
10188 /* Fold the next_arg or va_start call EXP. Returns true if an error
10189 was produced, false otherwise. This is done so that we don't output
10190 the error or warning two or three times. */
10192 bool
10193 fold_builtin_next_arg (tree exp, bool va_start_p)
10195 tree fntype = TREE_TYPE (current_function_decl);
10196 int nargs = call_expr_nargs (exp);
10197 tree arg;
10198 /* There is a good chance that the current input_location points
10199 inside the definition of the va_start macro (perhaps on the token
10200 for the builtin) in a system header, so warnings would not be
10201 emitted. Use the location in real source code instead. */
10202 location_t current_location =
10203 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10204 NULL);
10206 if (!stdarg_p (fntype))
10208 error ("%<va_start%> used in function with fixed args");
10209 return true;
10212 if (va_start_p)
10214 if (va_start_p && (nargs != 2))
10216 error ("wrong number of arguments to function %<va_start%>");
10217 return true;
10219 arg = CALL_EXPR_ARG (exp, 1);
10221 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10222 when we checked the arguments and if needed issued a warning. */
10223 else
10225 if (nargs == 0)
10227 /* Evidently an out of date version of <stdarg.h>; can't validate
10228 va_start's second argument, but can still work as intended. */
10229 warning_at (current_location,
10230 OPT_Wvarargs,
10231 "%<__builtin_next_arg%> called without an argument");
10232 return true;
10234 else if (nargs > 1)
10236 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10237 return true;
10239 arg = CALL_EXPR_ARG (exp, 0);
10242 if (TREE_CODE (arg) == SSA_NAME)
10243 arg = SSA_NAME_VAR (arg);
10245 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10246 or __builtin_next_arg (0) the first time we see it, after checking
10247 the arguments and if needed issuing a warning. */
10248 if (!integer_zerop (arg))
10250 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10252 /* Strip off all nops for the sake of the comparison. This
10253 is not quite the same as STRIP_NOPS. It does more.
10254 We must also strip off INDIRECT_EXPR for C++ reference
10255 parameters. */
10256 while (CONVERT_EXPR_P (arg)
10257 || TREE_CODE (arg) == INDIRECT_REF)
10258 arg = TREE_OPERAND (arg, 0);
10259 if (arg != last_parm)
10261 /* FIXME: Sometimes with the tree optimizers we can see something
10262 other than the last argument even though the user did pass the
10263 last argument. We just warn and set the arg to be the last
10264 argument, so we may still generate wrong code because of
10265 it. */
10266 warning_at (current_location,
10267 OPT_Wvarargs,
10268 "second parameter of %<va_start%> not last named argument");
10271 /* Undefined by C99 7.15.1.4p4 (va_start):
10272 "If the parameter parmN is declared with the register storage
10273 class, with a function or array type, or with a type that is
10274 not compatible with the type that results after application of
10275 the default argument promotions, the behavior is undefined." */
10277 else if (DECL_REGISTER (arg))
10279 warning_at (current_location,
10280 OPT_Wvarargs,
10281 "undefined behavior when second parameter of "
10282 "%<va_start%> is declared with %<register%> storage");
10285 /* We want to verify the second parameter just once before the tree
10286 optimizers are run and then avoid keeping it in the tree,
10287 as otherwise we could warn even for correct code like:
10288 void foo (int i, ...)
10289 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10290 if (va_start_p)
10291 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10292 else
10293 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10295 return false;
10299 /* Expand a call EXP to __builtin_object_size. */
10301 static rtx
10302 expand_builtin_object_size (tree exp)
10304 tree ost;
10305 int object_size_type;
10306 tree fndecl = get_callee_fndecl (exp);
10308 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10310 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10311 exp, fndecl);
10312 expand_builtin_trap ();
10313 return const0_rtx;
10316 ost = CALL_EXPR_ARG (exp, 1);
10317 STRIP_NOPS (ost);
10319 if (TREE_CODE (ost) != INTEGER_CST
10320 || tree_int_cst_sgn (ost) < 0
10321 || compare_tree_int (ost, 3) > 0)
10323 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10324 exp, fndecl);
10325 expand_builtin_trap ();
10326 return const0_rtx;
10329 object_size_type = tree_to_shwi (ost);
10331 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10334 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10335 FCODE is the BUILT_IN_* to use.
10336 Return NULL_RTX if we failed; the caller should emit a normal call,
10337 otherwise try to get the result in TARGET, if convenient (and in
10338 mode MODE if that's convenient). */
10340 static rtx
10341 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10342 enum built_in_function fcode)
10344 if (!validate_arglist (exp,
10345 POINTER_TYPE,
10346 fcode == BUILT_IN_MEMSET_CHK
10347 ? INTEGER_TYPE : POINTER_TYPE,
10348 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10349 return NULL_RTX;
10351 tree dest = CALL_EXPR_ARG (exp, 0);
10352 tree src = CALL_EXPR_ARG (exp, 1);
10353 tree len = CALL_EXPR_ARG (exp, 2);
10354 tree size = CALL_EXPR_ARG (exp, 3);
10356 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10357 /*str=*/NULL_TREE, size);
10359 if (!tree_fits_uhwi_p (size))
10360 return NULL_RTX;
10362 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10364 /* Avoid transforming the checking call to an ordinary one when
10365 an overflow has been detected or when the call couldn't be
10366 validated because the size is not constant. */
10367 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10368 return NULL_RTX;
10370 tree fn = NULL_TREE;
10371 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10372 mem{cpy,pcpy,move,set} is available. */
10373 switch (fcode)
10375 case BUILT_IN_MEMCPY_CHK:
10376 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10377 break;
10378 case BUILT_IN_MEMPCPY_CHK:
10379 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10380 break;
10381 case BUILT_IN_MEMMOVE_CHK:
10382 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10383 break;
10384 case BUILT_IN_MEMSET_CHK:
10385 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10386 break;
10387 default:
10388 break;
10391 if (! fn)
10392 return NULL_RTX;
10394 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10395 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10396 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10397 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10399 else if (fcode == BUILT_IN_MEMSET_CHK)
10400 return NULL_RTX;
10401 else
10403 unsigned int dest_align = get_pointer_alignment (dest);
10405 /* If DEST is not a pointer type, call the normal function. */
10406 if (dest_align == 0)
10407 return NULL_RTX;
10409 /* If SRC and DEST are the same (and not volatile), do nothing. */
10410 if (operand_equal_p (src, dest, 0))
10412 tree expr;
10414 if (fcode != BUILT_IN_MEMPCPY_CHK)
10416 /* Evaluate and ignore LEN in case it has side-effects. */
10417 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10418 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10421 expr = fold_build_pointer_plus (dest, len);
10422 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10425 /* __memmove_chk special case. */
10426 if (fcode == BUILT_IN_MEMMOVE_CHK)
10428 unsigned int src_align = get_pointer_alignment (src);
10430 if (src_align == 0)
10431 return NULL_RTX;
10433 /* If src is categorized for a readonly section we can use
10434 normal __memcpy_chk. */
10435 if (readonly_data_expr (src))
10437 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10438 if (!fn)
10439 return NULL_RTX;
10440 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10441 dest, src, len, size);
10442 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10443 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10444 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10447 return NULL_RTX;
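/* A sketch of the common successful case above: when the length is
   a known constant that fits the destination,

       __builtin___memcpy_chk (d, s, 16, 32)

   is rewritten to the unchecked

       memcpy (d, s, 16)

   whereas a detected overflow (say length 32 against object size 16)
   returns NULL_RTX so the checking call is emitted as a normal
   library call and the runtime check still fires. */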
10451 /* Emit a warning if a buffer overflow is detected at compile time. */
10453 static void
10454 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10456 /* The source string. */
10457 tree srcstr = NULL_TREE;
10458 /* The size of the destination object. */
10459 tree objsize = NULL_TREE;
10460 /* The string that is being concatenated with (as in __strcat_chk)
10461 or null if it isn't. */
10462 tree catstr = NULL_TREE;
10463 /* The maximum length of the source sequence in a bounded operation
10464 (such as __strncat_chk) or null if the operation isn't bounded
10465 (such as __strcat_chk). */
10466 tree maxread = NULL_TREE;
10467 /* The exact size of the access (such as in __strncpy_chk). */
10468 tree size = NULL_TREE;
10470 switch (fcode)
10472 case BUILT_IN_STRCPY_CHK:
10473 case BUILT_IN_STPCPY_CHK:
10474 srcstr = CALL_EXPR_ARG (exp, 1);
10475 objsize = CALL_EXPR_ARG (exp, 2);
10476 break;
10478 case BUILT_IN_STRCAT_CHK:
10479 /* For __strcat_chk the warning will be emitted only if overflowing
10480 by at least strlen (dest) + 1 bytes. */
10481 catstr = CALL_EXPR_ARG (exp, 0);
10482 srcstr = CALL_EXPR_ARG (exp, 1);
10483 objsize = CALL_EXPR_ARG (exp, 2);
10484 break;
10486 case BUILT_IN_STRNCAT_CHK:
10487 catstr = CALL_EXPR_ARG (exp, 0);
10488 srcstr = CALL_EXPR_ARG (exp, 1);
10489 maxread = CALL_EXPR_ARG (exp, 2);
10490 objsize = CALL_EXPR_ARG (exp, 3);
10491 break;
10493 case BUILT_IN_STRNCPY_CHK:
10494 case BUILT_IN_STPNCPY_CHK:
10495 srcstr = CALL_EXPR_ARG (exp, 1);
10496 size = CALL_EXPR_ARG (exp, 2);
10497 objsize = CALL_EXPR_ARG (exp, 3);
10498 break;
10500 case BUILT_IN_SNPRINTF_CHK:
10501 case BUILT_IN_VSNPRINTF_CHK:
10502 maxread = CALL_EXPR_ARG (exp, 1);
10503 objsize = CALL_EXPR_ARG (exp, 3);
10504 break;
10505 default:
10506 gcc_unreachable ();
10509 if (catstr && maxread)
10511 /* Check __strncat_chk. There is no way to determine the length
10512 of the string to which the source string is being appended so
10513 just warn when the length of the source string is not known. */
10514 check_strncat_sizes (exp, objsize);
10515 return;
10518 /* The destination argument is the first one for all built-ins above. */
10519 tree dst = CALL_EXPR_ARG (exp, 0);
10521 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10524 /* Emit a warning if a buffer overflow is detected at compile time
10525 in __sprintf_chk/__vsprintf_chk calls. */
10527 static void
10528 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10530 tree size, len, fmt;
10531 const char *fmt_str;
10532 int nargs = call_expr_nargs (exp);
10534 /* Verify the required arguments in the original call. */
10536 if (nargs < 4)
10537 return;
10538 size = CALL_EXPR_ARG (exp, 2);
10539 fmt = CALL_EXPR_ARG (exp, 3);
10541 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10542 return;
10544 /* Check whether the format is a literal string constant. */
10545 fmt_str = c_getstr (fmt);
10546 if (fmt_str == NULL)
10547 return;
10549 if (!init_target_chars ())
10550 return;
10552 /* If the format doesn't contain % args or %%, we know its size. */
10553 if (strchr (fmt_str, target_percent) == 0)
10554 len = build_int_cstu (size_type_node, strlen (fmt_str));
10555 /* If the format is "%s" and the first ... argument is a string
10556 literal, we know its length too. */
10557 else if (fcode == BUILT_IN_SPRINTF_CHK
10558 && strcmp (fmt_str, target_percent_s) == 0)
10560 tree arg;
10562 if (nargs < 5)
10563 return;
10564 arg = CALL_EXPR_ARG (exp, 4);
10565 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10566 return;
10568 len = c_strlen (arg, 1);
10569 if (!len || ! tree_fits_uhwi_p (len))
10570 return;
10572 else
10573 return;
10575 /* Add one for the terminating nul. */
10576 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10578 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10579 /*maxread=*/NULL_TREE, len, size);
10582 /* Emit a warning if free is called with the address of a variable. */
10584 static void
10585 maybe_emit_free_warning (tree exp)
10587 tree arg = CALL_EXPR_ARG (exp, 0);
10589 STRIP_NOPS (arg);
10590 if (TREE_CODE (arg) != ADDR_EXPR)
10591 return;
10593 arg = get_base_address (TREE_OPERAND (arg, 0));
10594 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10595 return;
10597 if (SSA_VAR_P (arg))
10598 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10599 "%Kattempt to free a non-heap object %qD", exp, arg);
10600 else
10601 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10602 "%Kattempt to free a non-heap object", exp);
10605 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10606 if possible. */
10608 static tree
10609 fold_builtin_object_size (tree ptr, tree ost)
10611 unsigned HOST_WIDE_INT bytes;
10612 int object_size_type;
10614 if (!validate_arg (ptr, POINTER_TYPE)
10615 || !validate_arg (ost, INTEGER_TYPE))
10616 return NULL_TREE;
10618 STRIP_NOPS (ost);
10620 if (TREE_CODE (ost) != INTEGER_CST
10621 || tree_int_cst_sgn (ost) < 0
10622 || compare_tree_int (ost, 3) > 0)
10623 return NULL_TREE;
10625 object_size_type = tree_to_shwi (ost);
10627 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10628 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10629 and (size_t) 0 for types 2 and 3. */
10630 if (TREE_SIDE_EFFECTS (ptr))
10631 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10633 if (TREE_CODE (ptr) == ADDR_EXPR)
10635 compute_builtin_object_size (ptr, object_size_type, &bytes);
10636 if (wi::fits_to_tree_p (bytes, size_type_node))
10637 return build_int_cstu (size_type_node, bytes);
10639 else if (TREE_CODE (ptr) == SSA_NAME)
10641 /* If object size is not known yet, delay folding until
10642 later. Maybe subsequent passes will help determine
10643 it. */
10644 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10645 && wi::fits_to_tree_p (bytes, size_type_node))
10646 return build_int_cstu (size_type_node, bytes);
10649 return NULL_TREE;
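/* A behavioral sketch of the fold above, using a local array:

       char buf[10];
       __builtin_object_size (&buf[4], 0)   folds to 6
       __builtin_object_size (&buf[4], 2)   folds to 6
       __builtin_object_size (p, 0)         deferred; (size_t) -1 if
                                            still unknown at expansion
       __builtin_object_size (p, 2)         deferred; (size_t) 0 likewise

   where p is a pointer about which nothing is known; the -1/0
   defaults match the maximum (types 0 and 1) and minimum (types 2
   and 3) interpretations. */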
10652 /* Builtins with folding operations that operate on "..." arguments
10653 need special handling; we need to store the arguments in a convenient
10654 data structure before attempting any folding. Fortunately there are
10655 only a few builtins that fall into this category. FNDECL is the
10656 function, EXP is the CALL_EXPR for the call. */
10658 static tree
10659 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10661 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10662 tree ret = NULL_TREE;
10664 switch (fcode)
10666 case BUILT_IN_FPCLASSIFY:
10667 ret = fold_builtin_fpclassify (loc, args, nargs);
10668 break;
10670 default:
10671 break;
10673 if (ret)
10675 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10676 SET_EXPR_LOCATION (ret, loc);
10677 TREE_NO_WARNING (ret) = 1;
10678 return ret;
10680 return NULL_TREE;

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
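
/* Example of use (a sketch, not a quote of the real folders, which
   live mostly in gimple-fold.c): these strings let format-string
   folds compare against the target character set, which can differ
   from the host's (e.g. EBCDIC targets).  Recognizing
   printf ("%s\n", str) looks roughly like:

     const char *fmt_str = c_getstr (fmt);
     if (fmt_str && strcmp (fmt_str, target_percent_s_newline) == 0)
       ... replace the call with puts (str) ...

   Comparing against a host "%s\n" literal would be wrong whenever
   host and target charsets disagree, which is why the characters are
   obtained from the lang hook above.  */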

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
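
/* Typical calling pattern (a sketch of the pattern used by
   do_mpfr_remquo and do_mpfr_lgamma_r below; mpfr_sin stands in for
   an arbitrary MPFR entry point):

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();                    (flags must be clean)
     int inexact = mpfr_sin (m, m, rnd);     (may set flags)
     tree res = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);

   A NULL_TREE result means the value overflowed, underflowed, or was
   inexact under -frounding-math, and the builtin is left unfolded.  */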

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the value pointed to by ARG_QUO and return the remainder.  The type
   is taken from the type of ARG0 and is used for setting the precision
   of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
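
/* For example (hypothetical constant folding, following C99 remquo
   semantics): remquo (5.0, 3.0, &q) rounds 5.0/3.0 to the nearest
   integer, 2, so this routine folds the call to a COMPOUND_EXPR that
   stores 2 through the quo pointer and yields the remainder
   5.0 - 2*3.0 = -1.0.  The modulo by 2^(INT_TYPE_SIZE-1) above only
   matters when the host 'long' is wider than the target 'int'.  */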

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
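
/* For example (hypothetical folding): lgamma_r (-2.5, &sg) folds to
   log (|Gamma (-2.5)|) ~= -0.0562 with -1 stored through ARG_SG,
   since Gamma (-2.5) ~= -0.9453 is negative; for arguments where
   Gamma is positive, 1 is stored instead.  Zero and the negative
   integers excluded above are poles of Gamma, so no finite constant
   exists there.  */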

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }
  return result;
}
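
/* Typical caller (a sketch; the exact value passed for DO_NONFINITE
   depends on the builtin being folded): folding a constant
   __builtin_cpow call hands mpc_pow to this routine, whose signature
   matches FUNC:

     tree res = do_mpc_arg2 (arg0, arg1, type, flag_cx_limited_range,
			     mpc_pow);

   With DO_NONFINITE nonzero, the checks in do_mpc_ckconv are relaxed
   so that Inf/NaN operands and results still fold.  */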

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
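
/* Example trigger (hypothetical user code): an asm label on a
   builtin's declaration, e.g.

     extern int ffs (int) __asm__ ("__my_ffs");

   ends up here, renaming the builtin decl and, for ffs on targets
   whose 'int' is narrower than a word, also redirecting the ffs
   libfunc so expanded code calls the renamed symbol.  */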

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
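
/* Example (hypothetical caller): when folding strchr (s, c) with a
   constant C, a string folder can do

     char ch;
     if (target_char_cst_p (c, &ch))
       ... search the STRING_CST bytes for CH on the host ...

   The CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR test makes this fail when
   the target char is not the host's width, so such folds are simply
   skipped rather than done with a truncated character.  */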

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
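
/* For example, with a 64-bit ptrdiff_t the value above is 2^63 - 1,
   i.e. the target's PTRDIFF_MAX; callers use it as the upper bound
   when deciding whether a size argument to a string or memory builtin
   is plausible.  */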