/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
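/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" are all recognized by this predicate, while a
   plain "memcpy" is not.  */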
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
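/* For example, when the encoding above is ALIGN == 16 and BITPOS == 4
   (the address is 4 more than a multiple of 16), the alignment that may
   actually be assumed is least_bit_hwi (4) == 4.  */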
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
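/* For instance, string_length ("ab\0cd", 1, 5) is 2, and for a two-byte
   wide string with elements 'a', 'b', L'\0' the call
   string_length (ptr, 2, 3) is likewise 2, whatever the byte order.  */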
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* For a call EXPR (which may be null) that expects a string argument
   and SRC as the argument, returns false if SRC is a character array
   with no terminating NUL.  When nonnull, BOUND is the number of
   characters in which to expect the terminating NUL.
   When EXPR is nonnull also issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
  tree size;
  bool exact;
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  if (bound)
    {
      wide_int min, max;
      if (TREE_CODE (bound) == INTEGER_CST)
	min = max = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, &min, &max);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (wi::leu_p (min, wi::to_wide (size)))
	return true;
    }

  if (expr && !TREE_NO_WARNING (expr))
    {
      tree fndecl = get_callee_fndecl (expr);
      const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
    }

  return false;
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in an SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   from what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}

      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
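/* Illustrative sketch of the bookkeeping above: for char a[8] = "foo",
   the STRING_CST has TREE_STRING_LENGTH 4 (including the terminating NUL)
   while MEMSIZE is 8, so STRELTS is 4 and MAXELTS is 8; a known offset of
   1 yields ssize_int (2), while an offset of 9 triggers the out-of-bounds
   warning and returns NULL_TREE.  */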
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
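/* A sketch of the packing this performs, assuming a little-endian target
   with 8-bit units: c_readstr ("ab", SImode) yields the constant
   0x00006261, i.e. 'a' in the lowest byte, 'b' in the next, and zeros
   for every byte after the terminating NUL.  */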
/* Cast a target constant CST to target CHAR and if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
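/* On the common configuration where both target and host chars are 8 bits
   wide, this reduces to storing the low byte of CST in *P and returning 0;
   the failure return of 1 covers non-INTEGER_CST arguments and values that
   do not survive the narrowing.  */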
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
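/* The buffer written above is laid out in Pmode-sized words as:

     word 0:  saved hard frame pointer
     word 1:  address of the receiver label
     word 2:  start of the machine-dependent stack save area

   expand_builtin_longjmp below reads the same slots back.  */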
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the argument iterator ITER has more arguments left.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
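/* A typical use, as in expand_builtin_nonlocal_goto below, is

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which accepts exactly two pointer arguments; ending the list with 0
   instead of VOID_TYPE would additionally allow trailing arguments of
   any type.  */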
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
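/* E.g. __builtin_prefetch (p, 0, 3) becomes the target's prefetch insn
   as a read prefetch with maximal locality when one is available, and
   otherwise only evaluates P for its side effects.  */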
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
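/* Sketch of the layout this computes, for a hypothetical target whose
   argument registers all have an 8-byte mode: SIZE starts at
   GET_MODE_SIZE (Pmode) for the incoming arg-pointer, grows by another
   word if a structure value address is passed, and then, for each
   argument register, is rounded up to that register's alignment before
   its GET_MODE_SIZE is added.  */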
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1740 /* Perform an untyped call and save the state required to perform an
1741 untyped return of whatever value was returned by the given function. */
1743 static rtx
1744 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1746 int size, align, regno;
1747 fixed_size_mode mode;
1748 rtx incoming_args, result, reg, dest, src;
1749 rtx_call_insn *call_insn;
1750 rtx old_stack_level = 0;
1751 rtx call_fusage = 0;
1752 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1754 arguments = convert_memory_address (Pmode, arguments);
1756 /* Create a block where the return registers can be saved. */
1757 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1759 /* Fetch the arg pointer from the ARGUMENTS block. */
1760 incoming_args = gen_reg_rtx (Pmode);
1761 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1762 if (!STACK_GROWS_DOWNWARD)
1763 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1764 incoming_args, 0, OPTAB_LIB_WIDEN);
1766 /* Push a new argument block and copy the arguments. Do not allow
1767 the (potential) memcpy call below to interfere with our stack
1768 manipulations. */
1769 do_pending_stack_adjust ();
1770 NO_DEFER_POP;
1772 /* Save the stack with nonlocal if available. */
1773 if (targetm.have_save_stack_nonlocal ())
1774 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1775 else
1776 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1778 /* Allocate a block of memory onto the stack and copy the memory
1779 arguments to the outgoing arguments address. We can pass TRUE
1780 as the 4th argument because we just saved the stack pointer
1781 and will restore it right after the call. */
1782 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1784 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1785 may have already set current_function_calls_alloca to true.
1786 current_function_calls_alloca won't be set if argsize is zero,
1787 so we have to guarantee need_drap is true here. */
1788 if (SUPPORTS_STACK_ALIGNMENT)
1789 crtl->need_drap = true;
1791 dest = virtual_outgoing_args_rtx;
1792 if (!STACK_GROWS_DOWNWARD)
1794 if (CONST_INT_P (argsize))
1795 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1796 else
1797 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1799 dest = gen_rtx_MEM (BLKmode, dest);
1800 set_mem_align (dest, PARM_BOUNDARY);
1801 src = gen_rtx_MEM (BLKmode, incoming_args);
1802 set_mem_align (src, PARM_BOUNDARY);
1803 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1805 /* Refer to the argument block. */
1806 apply_args_size ();
1807 arguments = gen_rtx_MEM (BLKmode, arguments);
1808 set_mem_align (arguments, PARM_BOUNDARY);
1810 /* Walk past the arg-pointer and structure value address. */
1811 size = GET_MODE_SIZE (Pmode);
1812 if (struct_value)
1813 size += GET_MODE_SIZE (Pmode);
1815 /* Restore each of the registers previously saved. Make USE insns
1816 for each of these registers for use in making the call. */
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_args_mode[regno]) != VOIDmode)
1820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1821 if (size % align != 0)
1822 size = CEIL (size, align) * align;
1823 reg = gen_rtx_REG (mode, regno);
1824 emit_move_insn (reg, adjust_address (arguments, mode, size));
1825 use_reg (&call_fusage, reg);
1826 size += GET_MODE_SIZE (mode);
1829 /* Restore the structure value address unless this is passed as an
1830 "invisible" first argument. */
1831 size = GET_MODE_SIZE (Pmode);
1832 if (struct_value)
1834 rtx value = gen_reg_rtx (Pmode);
1835 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1836 emit_move_insn (struct_value, value);
1837 if (REG_P (struct_value))
1838 use_reg (&call_fusage, struct_value);
1841 /* All arguments and registers used for the call are set up by now! */
1842 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1844 /* Ensure the address is valid. A SYMBOL_REF is already valid, so
1845 nothing needs to be done, and we don't want to load it into a register
1846 as an optimization, because prepare_call_address already did so if needed. */
1847 if (GET_CODE (function) != SYMBOL_REF)
1848 function = memory_address (FUNCTION_MODE, function);
1850 /* Generate the actual call instruction and save the return value. */
1851 if (targetm.have_untyped_call ())
1853 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1854 emit_call_insn (targetm.gen_untyped_call (mem, result,
1855 result_vector (1, result)));
1857 else if (targetm.have_call_value ())
1859 rtx valreg = 0;
1861 /* Locate the unique return register. It is not possible to
1862 express a call that sets more than one return register using
1863 call_value; use untyped_call for that. In fact, untyped_call
1864 only needs to save the return registers in the given block. */
1865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1866 if ((mode = apply_result_mode[regno]) != VOIDmode)
1868 gcc_assert (!valreg); /* have_untyped_call required. */
1870 valreg = gen_rtx_REG (mode, regno);
1873 emit_insn (targetm.gen_call_value (valreg,
1874 gen_rtx_MEM (FUNCTION_MODE, function),
1875 const0_rtx, NULL_RTX, const0_rtx));
1877 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1879 else
1880 gcc_unreachable ();
1882 /* Find the CALL insn we just emitted, and attach the register usage
1883 information. */
1884 call_insn = last_call_insn ();
1885 add_function_usage_to (call_insn, call_fusage);
1887 /* Restore the stack. */
1888 if (targetm.have_save_stack_nonlocal ())
1889 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1890 else
1891 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1892 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1894 OK_DEFER_POP;
1896 /* Return the address of the result block. */
1897 result = copy_addr_to_reg (XEXP (result, 0));
1898 return convert_memory_address (ptr_mode, result);
1901 /* Perform an untyped return. */
1903 static void
1904 expand_builtin_return (rtx result)
1906 int size, align, regno;
1907 fixed_size_mode mode;
1908 rtx reg;
1909 rtx_insn *call_fusage = 0;
1911 result = convert_memory_address (Pmode, result);
1913 apply_result_size ();
1914 result = gen_rtx_MEM (BLKmode, result);
1916 if (targetm.have_untyped_return ())
1918 rtx vector = result_vector (0, result);
1919 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1920 emit_barrier ();
1921 return;
1924 /* Restore the return value and note that each value is used. */
1925 size = 0;
1926 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1927 if ((mode = apply_result_mode[regno]) != VOIDmode)
1929 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1930 if (size % align != 0)
1931 size = CEIL (size, align) * align;
1932 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1933 emit_move_insn (reg, adjust_address (result, mode, size));
1935 push_to_sequence (call_fusage);
1936 emit_use (reg);
1937 call_fusage = get_insns ();
1938 end_sequence ();
1939 size += GET_MODE_SIZE (mode);
1942 /* Put the USE insns before the return. */
1943 emit_insn (call_fusage);
1945 /* Return whatever values were restored by jumping directly to the end
1946 of the function. */
1947 expand_naked_return ();
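/* A hypothetical user-level sketch (not from this file) of the builtin
   triple the three expanders above implement; target_fn is a placeholder
   for any function with a compatible signature and 64 is an assumed
   upper bound on the size of the stack arguments:

     void *forward_all (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   expand_builtin_apply_args expands the first call, expand_builtin_apply
   the second and expand_builtin_return the third. */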
1950 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1952 static enum type_class
1953 type_to_class (tree type)
1955 switch (TREE_CODE (type))
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1978 /* Expand a call EXP to __builtin_classify_type. */
1980 static rtx
1981 expand_builtin_classify_type (tree exp)
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1985 return GEN_INT (no_type_class);
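/* For example (an illustrative note, not original source), with the
   classification values from typeclass.h:

     __builtin_classify_type (0)          -> integer_type_class
     __builtin_classify_type (0.0)        -> real_type_class
     __builtin_classify_type ((void *) 0) -> pointer_type_class

   each folded to a compile-time constant through type_to_class. */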
1988 /* This helper macro, meant to be used in mathfn_built_in below, determines
1989 which among a set of builtin math functions is appropriate for a given type
1990 mode. The `F' (float) and `L' (long double) variants are automatically
1991 generated from the 'double' case. If a function supports the _Float<N>
1992 and _Float<N>X types, additional variants are considered with the
1993 'F32', 'F64', 'F128', etc. suffixes. */
1994 #define CASE_MATHFN(MATHFN) \
1995 CASE_CFN_##MATHFN: \
1996 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1997 fcodel = BUILT_IN_##MATHFN##L ; break;
1998 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1999 types. */
2000 #define CASE_MATHFN_FLOATN(MATHFN) \
2001 CASE_CFN_##MATHFN: \
2002 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2003 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2004 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2005 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2006 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2007 break;
2008 /* Similar to above, but appends _R after any F/L suffix. */
2009 #define CASE_MATHFN_REENT(MATHFN) \
2010 case CFN_BUILT_IN_##MATHFN##_R: \
2011 case CFN_BUILT_IN_##MATHFN##F_R: \
2012 case CFN_BUILT_IN_##MATHFN##L_R: \
2013 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2014 fcodel = BUILT_IN_##MATHFN##L_R ; break;
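/* As an illustration (an expansion sketch, not original source),
   CASE_MATHFN (SIN) expands to roughly

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one CASE_MATHFN line in the switch below selects the double, float
   and long double codes of a math function at once. */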
2016 /* Return a function equivalent to FN but operating on floating-point
2017 values of type TYPE, or END_BUILTINS if no such function exists.
2018 This is purely an operation on function codes; it does not guarantee
2019 that the target actually has an implementation of the function. */
2021 static built_in_function
2022 mathfn_built_in_2 (tree type, combined_fn fn)
2024 tree mtype;
2025 built_in_function fcode, fcodef, fcodel;
2026 built_in_function fcodef16 = END_BUILTINS;
2027 built_in_function fcodef32 = END_BUILTINS;
2028 built_in_function fcodef64 = END_BUILTINS;
2029 built_in_function fcodef128 = END_BUILTINS;
2030 built_in_function fcodef32x = END_BUILTINS;
2031 built_in_function fcodef64x = END_BUILTINS;
2032 built_in_function fcodef128x = END_BUILTINS;
2034 switch (fn)
2036 CASE_MATHFN (ACOS)
2037 CASE_MATHFN (ACOSH)
2038 CASE_MATHFN (ASIN)
2039 CASE_MATHFN (ASINH)
2040 CASE_MATHFN (ATAN)
2041 CASE_MATHFN (ATAN2)
2042 CASE_MATHFN (ATANH)
2043 CASE_MATHFN (CBRT)
2044 CASE_MATHFN_FLOATN (CEIL)
2045 CASE_MATHFN (CEXPI)
2046 CASE_MATHFN_FLOATN (COPYSIGN)
2047 CASE_MATHFN (COS)
2048 CASE_MATHFN (COSH)
2049 CASE_MATHFN (DREM)
2050 CASE_MATHFN (ERF)
2051 CASE_MATHFN (ERFC)
2052 CASE_MATHFN (EXP)
2053 CASE_MATHFN (EXP10)
2054 CASE_MATHFN (EXP2)
2055 CASE_MATHFN (EXPM1)
2056 CASE_MATHFN (FABS)
2057 CASE_MATHFN (FDIM)
2058 CASE_MATHFN_FLOATN (FLOOR)
2059 CASE_MATHFN_FLOATN (FMA)
2060 CASE_MATHFN_FLOATN (FMAX)
2061 CASE_MATHFN_FLOATN (FMIN)
2062 CASE_MATHFN (FMOD)
2063 CASE_MATHFN (FREXP)
2064 CASE_MATHFN (GAMMA)
2065 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2066 CASE_MATHFN (HUGE_VAL)
2067 CASE_MATHFN (HYPOT)
2068 CASE_MATHFN (ILOGB)
2069 CASE_MATHFN (ICEIL)
2070 CASE_MATHFN (IFLOOR)
2071 CASE_MATHFN (INF)
2072 CASE_MATHFN (IRINT)
2073 CASE_MATHFN (IROUND)
2074 CASE_MATHFN (ISINF)
2075 CASE_MATHFN (J0)
2076 CASE_MATHFN (J1)
2077 CASE_MATHFN (JN)
2078 CASE_MATHFN (LCEIL)
2079 CASE_MATHFN (LDEXP)
2080 CASE_MATHFN (LFLOOR)
2081 CASE_MATHFN (LGAMMA)
2082 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2083 CASE_MATHFN (LLCEIL)
2084 CASE_MATHFN (LLFLOOR)
2085 CASE_MATHFN (LLRINT)
2086 CASE_MATHFN (LLROUND)
2087 CASE_MATHFN (LOG)
2088 CASE_MATHFN (LOG10)
2089 CASE_MATHFN (LOG1P)
2090 CASE_MATHFN (LOG2)
2091 CASE_MATHFN (LOGB)
2092 CASE_MATHFN (LRINT)
2093 CASE_MATHFN (LROUND)
2094 CASE_MATHFN (MODF)
2095 CASE_MATHFN (NAN)
2096 CASE_MATHFN (NANS)
2097 CASE_MATHFN_FLOATN (NEARBYINT)
2098 CASE_MATHFN (NEXTAFTER)
2099 CASE_MATHFN (NEXTTOWARD)
2100 CASE_MATHFN (POW)
2101 CASE_MATHFN (POWI)
2102 CASE_MATHFN (POW10)
2103 CASE_MATHFN (REMAINDER)
2104 CASE_MATHFN (REMQUO)
2105 CASE_MATHFN_FLOATN (RINT)
2106 CASE_MATHFN_FLOATN (ROUND)
2107 CASE_MATHFN_FLOATN (ROUNDEVEN)
2108 CASE_MATHFN (SCALB)
2109 CASE_MATHFN (SCALBLN)
2110 CASE_MATHFN (SCALBN)
2111 CASE_MATHFN (SIGNBIT)
2112 CASE_MATHFN (SIGNIFICAND)
2113 CASE_MATHFN (SIN)
2114 CASE_MATHFN (SINCOS)
2115 CASE_MATHFN (SINH)
2116 CASE_MATHFN_FLOATN (SQRT)
2117 CASE_MATHFN (TAN)
2118 CASE_MATHFN (TANH)
2119 CASE_MATHFN (TGAMMA)
2120 CASE_MATHFN_FLOATN (TRUNC)
2121 CASE_MATHFN (Y0)
2122 CASE_MATHFN (Y1)
2123 CASE_MATHFN (YN)
2125 default:
2126 return END_BUILTINS;
2129 mtype = TYPE_MAIN_VARIANT (type);
2130 if (mtype == double_type_node)
2131 return fcode;
2132 else if (mtype == float_type_node)
2133 return fcodef;
2134 else if (mtype == long_double_type_node)
2135 return fcodel;
2136 else if (mtype == float16_type_node)
2137 return fcodef16;
2138 else if (mtype == float32_type_node)
2139 return fcodef32;
2140 else if (mtype == float64_type_node)
2141 return fcodef64;
2142 else if (mtype == float128_type_node)
2143 return fcodef128;
2144 else if (mtype == float32x_type_node)
2145 return fcodef32x;
2146 else if (mtype == float64x_type_node)
2147 return fcodef64x;
2148 else if (mtype == float128x_type_node)
2149 return fcodef128x;
2150 else
2151 return END_BUILTINS;
2154 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2159 static tree
2160 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
2164 return NULL_TREE;
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2169 return builtin_decl_explicit (fcode2);
2172 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2174 tree
2175 mathfn_built_in (tree type, combined_fn fn)
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2180 /* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit builtin declarations. */
2183 tree
2184 mathfn_built_in (tree type, enum built_in_function fn)
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
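/* Usage sketch (illustrative, not part of the original source): to
   retarget a double function to float,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the decl for sinf when the implicit builtin declaration is
   available, and NULL_TREE otherwise. */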
2189 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internals.def, not whether
2192 it is actually available on the target. */
2194 internal_fn
2195 associated_internal_fn (tree fndecl)
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2202 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2203 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2204 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2205 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208 #include "internal-fn.def"
2210 CASE_FLT_FN (BUILT_IN_POW10):
2211 return IFN_EXP10;
2213 CASE_FLT_FN (BUILT_IN_DREM):
2214 return IFN_REMAINDER;
2216 CASE_FLT_FN (BUILT_IN_SCALBN):
2217 CASE_FLT_FN (BUILT_IN_SCALBLN):
2218 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2219 return IFN_LDEXP;
2220 return IFN_LAST;
2222 default:
2223 return IFN_LAST;
2227 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2235 internal_fn
2236 replacement_internal_fn (gcall *call)
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2243 tree_pair types = direct_internal_fn_types (ifn, call);
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2246 return ifn;
2249 return IFN_LAST;
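/* Usage sketch (illustrative, not part of the original source): given a
   gcall *CALL for "y = __builtin_sqrt (x)" on a target supporting
   IFN_SQRT, a pass might do

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         gcall *repl = gimple_build_call_internal (ifn, 1,
                                                   gimple_call_arg (call, 0));
         gimple_call_set_lhs (repl, gimple_call_lhs (call));
       }

   and then substitute REPL for CALL in the statement stream, after
   dealing with any errno side-effect as the comment above requires. */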
2252 /* Expand a call to the builtin ternary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands. */
2259 static rtx
2260 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2262 optab builtin_optab;
2263 rtx op0, op1, op2, result;
2264 rtx_insn *insns;
2265 tree fndecl = get_callee_fndecl (exp);
2266 tree arg0, arg1, arg2;
2267 machine_mode mode;
2269 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2270 return NULL_RTX;
2272 arg0 = CALL_EXPR_ARG (exp, 0);
2273 arg1 = CALL_EXPR_ARG (exp, 1);
2274 arg2 = CALL_EXPR_ARG (exp, 2);
2276 switch (DECL_FUNCTION_CODE (fndecl))
2278 CASE_FLT_FN (BUILT_IN_FMA):
2279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2280 builtin_optab = fma_optab; break;
2281 default:
2282 gcc_unreachable ();
2285 /* Make a suitable register to place result in. */
2286 mode = TYPE_MODE (TREE_TYPE (exp));
2288 /* Before working hard, check whether the instruction is available. */
2289 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2290 return NULL_RTX;
2292 result = gen_reg_rtx (mode);
2294 /* Always stabilize the argument list. */
2295 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2296 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2297 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2299 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2300 op1 = expand_normal (arg1);
2301 op2 = expand_normal (arg2);
2303 start_sequence ();
2305 /* Compute into RESULT.
2306 Set RESULT to wherever the result comes back. */
2307 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2308 result, 0);
2310 /* If we were unable to expand via the builtin, stop the sequence
2311 (without outputting the insns) and call the library function
2312 with the stabilized argument list. */
2313 if (result == 0)
2315 end_sequence ();
2316 return expand_call (exp, target, target == const0_rtx);
2319 /* Output the entire sequence. */
2320 insns = get_insns ();
2321 end_sequence ();
2322 emit_insn (insns);
2324 return result;
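/* For example (an illustrative note, not original source): on a target
   whose fma_optab has a DFmode handler,

     double d = __builtin_fma (a, b, c);

   is expanded by the function above into a single fused multiply-add
   insn; otherwise the expand_call fallback emits the fma libcall. */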
2327 /* Expand a call to the builtin sin and cos math functions.
2328 Return NULL_RTX if a normal call should be emitted rather than expanding the
2329 function in-line. EXP is the expression that is a call to the builtin
2330 function; if convenient, the result should be placed in TARGET.
2331 SUBTARGET may be used as the target for computing one of EXP's
2332 operands. */
2334 static rtx
2335 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2337 optab builtin_optab;
2338 rtx op0;
2339 rtx_insn *insns;
2340 tree fndecl = get_callee_fndecl (exp);
2341 machine_mode mode;
2342 tree arg;
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
2347 arg = CALL_EXPR_ARG (exp, 0);
2349 switch (DECL_FUNCTION_CODE (fndecl))
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = sincos_optab; break;
2354 default:
2355 gcc_unreachable ();
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2361 /* Check if a sincos insn is available; otherwise fall back
2362 to a sin or cos insn. */
2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2364 switch (DECL_FUNCTION_CODE (fndecl))
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 builtin_optab = sin_optab; break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = cos_optab; break;
2370 default:
2371 gcc_unreachable ();
2374 /* Before working hard, check whether the instruction is available. */
2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2377 rtx result = gen_reg_rtx (mode);
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side-effects more than once. */
2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2386 start_sequence ();
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
2390 if (builtin_optab == sincos_optab)
2392 int ok;
2394 switch (DECL_FUNCTION_CODE (fndecl))
2396 CASE_FLT_FN (BUILT_IN_SIN):
2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2398 break;
2399 CASE_FLT_FN (BUILT_IN_COS):
2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2401 break;
2402 default:
2403 gcc_unreachable ();
2405 gcc_assert (ok);
2407 else
2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
2410 if (result != 0)
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
2416 return result;
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2425 return expand_call (exp, target, target == const0_rtx);
2428 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available, return CODE_FOR_nothing. */
2432 static enum insn_code
2433 interclass_mathfn_icode (tree arg, tree fndecl)
2435 bool errno_set = false;
2436 optab builtin_optab = unknown_optab;
2437 machine_mode mode;
2439 switch (DECL_FUNCTION_CODE (fndecl))
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
2455 break;
2456 default:
2457 gcc_unreachable ();
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
2462 return CODE_FOR_nothing;
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2467 if (builtin_optab)
2468 return optab_handler (builtin_optab, mode);
2469 return CODE_FOR_nothing;
2472 /* Expand a call to one of the builtin math functions that operate on
2473 a floating-point argument and output an integer result (ilogb, isinf,
2474 isnan, etc.).
2475 Return 0 if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function; if convenient, the result should be placed in TARGET. */
2479 static rtx
2480 expand_builtin_interclass_mathfn (tree exp, rtx target)
2482 enum insn_code icode = CODE_FOR_nothing;
2483 rtx op0;
2484 tree fndecl = get_callee_fndecl (exp);
2485 machine_mode mode;
2486 tree arg;
2488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2489 return NULL_RTX;
2491 arg = CALL_EXPR_ARG (exp, 0);
2492 icode = interclass_mathfn_icode (arg, fndecl);
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2495 if (icode != CODE_FOR_nothing)
2497 class expand_operand ops[1];
2498 rtx_insn *last = get_last_insn ();
2499 tree orig_arg = arg;
2501 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2502 need to expand the argument again. This way, we will not perform
2503 side-effects more than once. */
2504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2506 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2508 if (mode != GET_MODE (op0))
2509 op0 = convert_to_mode (mode, op0, 0);
2511 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2512 if (maybe_legitimize_operands (icode, 0, 1, ops)
2513 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2514 return ops[0].value;
2516 delete_insns_since (last);
2517 CALL_EXPR_ARG (exp, 0) = orig_arg;
2520 return NULL_RTX;
2523 /* Expand a call to the builtin sincos math function.
2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2528 static rtx
2529 expand_builtin_sincos (tree exp)
2531 rtx op0, op1, op2, target1, target2;
2532 machine_mode mode;
2533 tree arg, sinp, cosp;
2534 int result;
2535 location_t loc = EXPR_LOCATION (exp);
2536 tree alias_type, alias_off;
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2549 /* Check if sincos insn is available, otherwise emit the call. */
2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2551 return NULL_RTX;
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2556 op0 = expand_normal (arg);
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2574 return const0_rtx;
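/* Usage sketch (hypothetical, not part of this file): with a sincos
   insn available for DFmode,

     double s, c;
     __builtin_sincos (x, &s, &c);

   expands to one two-value insn computing both results; the stores to
   *SINP and *COSP are the two emit_move_insn calls above. */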
2577 /* Expand a call to the internal cexpi builtin to the sincos math function.
2578 EXP is the expression that is a call to the builtin function; if convenient,
2579 the result should be placed in TARGET. */
2581 static rtx
2582 expand_builtin_cexpi (tree exp, rtx target)
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, type;
2586 machine_mode mode;
2587 rtx op0, op1, op2;
2588 location_t loc = EXPR_LOCATION (exp);
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 return NULL_RTX;
2593 arg = CALL_EXPR_ARG (exp, 0);
2594 type = TREE_TYPE (arg);
2595 mode = TYPE_MODE (TREE_TYPE (arg));
2597 /* Try expanding via a sincos optab, falling back to emitting a libcall
2598 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2599 is only generated from sincos or cexp, or when we have either of them. */
2600 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2602 op1 = gen_reg_rtx (mode);
2603 op2 = gen_reg_rtx (mode);
2605 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2607 /* Compute into op1 and op2. */
2608 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2610 else if (targetm.libc_has_function (function_sincos))
2612 tree call, fn = NULL_TREE;
2613 tree top1, top2;
2614 rtx op1a, op2a;
2616 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2622 else
2623 gcc_unreachable ();
2625 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2626 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op1a = copy_addr_to_reg (XEXP (op1, 0));
2628 op2a = copy_addr_to_reg (XEXP (op2, 0));
2629 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2630 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2632 /* Make sure not to fold the sincos call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2635 call, 3, arg, top1, top2));
2637 else
2639 tree call, fn = NULL_TREE, narg;
2640 tree ctype = build_complex_type (type);
2642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2648 else
2649 gcc_unreachable ();
2651 /* If we don't have a decl for cexp, create one. This is the
2652 friendliest fallback if the user calls __builtin_cexpi
2653 without full C99 function support on the target. */
2654 if (fn == NULL_TREE)
2656 tree fntype;
2657 const char *name = NULL;
2659 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2660 name = "cexpf";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2662 name = "cexp";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2664 name = "cexpl";
2666 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2667 fn = build_fn_decl (name, fntype);
2670 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2671 build_real (type, dconst0), arg);
2673 /* Make sure not to fold the cexp call again. */
2674 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2675 return expand_expr (build_call_nary (ctype, call, 1, narg),
2676 target, VOIDmode, EXPAND_NORMAL);
2679 /* Now build the proper return type. */
2680 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2681 make_tree (TREE_TYPE (arg), op2),
2682 make_tree (TREE_TYPE (arg), op1)),
2683 target, VOIDmode, EXPAND_NORMAL);
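/* Lowering sketch (illustrative, not part of this file): for
   __builtin_cexpif (x) on a target without a sincos insn but whose
   libc provides sincos, the code above emits the equivalent of

     float s, c;
     sincosf (x, &s, &c);
     return c + s * I;

   with the complex result built as COMPLEX_EXPR (c, s); when even
   sincos is unavailable it falls back to calling cexpf. */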
2686 /* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_expr,
2689 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2691 static tree
2692 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
2705 /* Expand a call to one of the builtin rounding functions gcc defines
2706 as an extension (lfloor and lceil). As these are gcc extensions we
2707 do not need to worry about setting errno to EDOM.
2708 If expanding via optab fails, lower expression to (int)(floor(x)).
2709 EXP is the expression that is a call to the builtin function;
2710 if convenient, the result should be placed in TARGET. */
2712 static rtx
2713 expand_builtin_int_roundingfn (tree exp, rtx target)
2715 convert_optab builtin_optab;
2716 rtx op0, tmp;
2717 rtx_insn *insns;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 machine_mode mode;
2722 tree arg;
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 return NULL_RTX;
2727 arg = CALL_EXPR_ARG (exp, 0);
2729 switch (DECL_FUNCTION_CODE (fndecl))
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2745 default:
2746 gcc_unreachable ();
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2752 target = gen_reg_rtx (mode);
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 side-effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2761 start_sequence ();
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2777 /* Fall back to floating point rounding optab. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should result
2783 in the best user experience for targets without full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2786 tree fntype;
2787 const char *name = NULL;
2789 switch (DECL_FUNCTION_CODE (fndecl))
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2835 /* Truncate the result of floating point optab to integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2840 return target;
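/* For example (a sketch, not original source): __builtin_lfloor (x) is
   expanded through lfloor_optab when the target provides it; failing
   that, the code above lowers it to the equivalent of

     (long) floor (x)

   i.e. a call to the floor fallback followed by expand_fix, building a
   floor decl on the fly for targets lacking full C99 support. */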
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2852 convert_optab builtin_optab;
2853 rtx op0;
2854 rtx_insn *insns;
2855 tree fndecl = get_callee_fndecl (exp);
2856 tree arg;
2857 machine_mode mode;
2858 enum built_in_function fallback_fn = BUILT_IN_NONE;
2860 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2861 return NULL_RTX;
2863 arg = CALL_EXPR_ARG (exp, 0);
2865 switch (DECL_FUNCTION_CODE (fndecl))
2867 CASE_FLT_FN (BUILT_IN_IRINT):
2868 fallback_fn = BUILT_IN_LRINT;
2869 gcc_fallthrough ();
2870 CASE_FLT_FN (BUILT_IN_LRINT):
2871 CASE_FLT_FN (BUILT_IN_LLRINT):
2872 builtin_optab = lrint_optab;
2873 break;
2875 CASE_FLT_FN (BUILT_IN_IROUND):
2876 fallback_fn = BUILT_IN_LROUND;
2877 gcc_fallthrough ();
2878 CASE_FLT_FN (BUILT_IN_LROUND):
2879 CASE_FLT_FN (BUILT_IN_LLROUND):
2880 builtin_optab = lround_optab;
2881 break;
2883 default:
2884 gcc_unreachable ();
2887 /* There's no easy way to detect the case we need to set EDOM. */
2888 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2889 return NULL_RTX;
2891 /* Make a suitable register to place result in. */
2892 mode = TYPE_MODE (TREE_TYPE (exp));
2894 /* There's no easy way to detect the case we need to set EDOM. */
2895 if (!flag_errno_math)
2897 rtx result = gen_reg_rtx (mode);
2899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2900 need to expand the argument again. This way, we will not perform
2901 side-effects more than once. */
2902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2904 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2906 start_sequence ();
2908 if (expand_sfix_optab (result, op0, builtin_optab))
2910 /* Output the entire sequence. */
2911 insns = get_insns ();
2912 end_sequence ();
2913 emit_insn (insns);
2914 return result;
2917 /* If we were unable to expand via the builtin, stop the sequence
2918 (without outputting the insns) and call the library function
2919 with the stabilized argument list. */
2920 end_sequence ();
2923 if (fallback_fn != BUILT_IN_NONE)
2925 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2926 targets, (int) round (x) should never be transformed into
2927 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2928 a call to lround in the hope that the target provides at least some
2929 C99 functions. This should result in the best user experience for
2930 targets without full C99 support. */
2931 tree fallback_fndecl = mathfn_built_in_1
2932 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2934 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2935 fallback_fndecl, 1, arg);
2937 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2938 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2939 return convert_to_mode (mode, target, 0);
2942 return expand_call (exp, target, target == const0_rtx);
2945 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2950 static rtx
2951 expand_builtin_powi (tree exp, rtx target)
2953 tree arg0, arg1;
2954 rtx op0, op1;
2955 machine_mode mode;
2956 machine_mode mode2;
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2965 /* Emit a libcall to libgcc. */
2967 /* Mode of the 2nd argument must match that of an int. */
2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2981 target, LCT_CONST, mode,
2982 op0, mode, op1, mode2);
2984 return target;
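/* Expansion sketch (illustrative, not part of the original source):
   powi has no insn pattern here, so for DFmode "__builtin_powi (x, n)"
   becomes a libgcc call along the lines of

     double __powidf2 (double x, int n);
     r = __powidf2 (x, n);

   via the powi_optab libfunc looked up above. */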
2987 /* Expand expression EXP which is a call to the strlen builtin. Return
2988 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2989 try to get the result in TARGET, if convenient. */
2991 static rtx
2992 expand_builtin_strlen (tree exp, rtx target,
2993 machine_mode target_mode)
2995 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2996 return NULL_RTX;
2998 class expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg;
3003 rtx_insn *before_strlen;
3004 machine_mode insn_mode;
3005 enum insn_code icode = CODE_FOR_nothing;
3006 unsigned int align;
3008 /* If the length can be computed at compile-time, return it. */
3009 len = c_strlen (src, 0);
3010 if (len)
3011 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013 /* If the length can be computed at compile-time and is a constant
3014 integer, but there are side-effects in src, evaluate
3015 src for side-effects, then return len.
3016 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3017 can be optimized into: i++; x = 3; */
3018 len = c_strlen (src, 1);
3019 if (len && TREE_CODE (len) == INTEGER_CST)
3021 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3022 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3025 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3027 /* If SRC is not a pointer type, don't do this operation inline. */
3028 if (align == 0)
3029 return NULL_RTX;
3031 /* Bail out if we can't compute strlen in the right mode. */
3032 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3034 icode = optab_handler (strlen_optab, insn_mode);
3035 if (icode != CODE_FOR_nothing)
3036 break;
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3057 /* Check to see if the argument was declared attribute nonstring
3058 and if so, issue a warning since at this point it's not known
3059 to be nul-terminated. */
3060 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3074 pat = get_insns ();
3075 end_sequence ();
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3090 return target;
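/* Folding sketch (illustrative, not from this file): because c_strlen
   is consulted first, a constant argument never reaches the strlen
   insn; e.g.

     size_t n = __builtin_strlen ("hello");

   expands directly to the constant 5. */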
3093 /* Expand call EXP to the strnlen built-in, returning the result
3094 in TARGET if convenient, or NULL_RTX on failure. */
3096 static rtx
3097 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3099 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3100 return NULL_RTX;
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 tree bound = CALL_EXPR_ARG (exp, 1);
3105 if (!bound)
3106 return NULL_RTX;
3108 location_t loc = UNKNOWN_LOCATION;
3109 if (EXPR_HAS_LOCATION (exp))
3110 loc = EXPR_LOCATION (exp);
3112 tree maxobjsize = max_object_size ();
3113 tree func = get_callee_fndecl (exp);
3115 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3116 so these conversions aren't necessary. */
3117 c_strlen_data lendata = { };
3118 tree len = c_strlen (src, 0, &lendata, 1);
3119 if (len)
3120 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3122 if (TREE_CODE (bound) == INTEGER_CST)
3124 if (!TREE_NO_WARNING (exp)
3125 && tree_int_cst_lt (maxobjsize, bound)
3126 && warning_at (loc, OPT_Wstringop_overflow_,
3127 "%K%qD specified bound %E "
3128 "exceeds maximum object size %E",
3129 exp, func, bound, maxobjsize))
3130 TREE_NO_WARNING (exp) = true;
3132 bool exact = true;
3133 if (!len || TREE_CODE (len) != INTEGER_CST)
3135 /* Clear EXACT if LEN may be less than SRC suggests,
3136 such as in
3137 strnlen (&a[i], sizeof a)
3138 where the value of i is unknown. Unless i's value is
3139 zero, the call is unsafe because the bound is greater. */
3140 lendata.decl = unterminated_array (src, &len, &exact);
3141 if (!lendata.decl)
3142 return NULL_RTX;
3145 if (lendata.decl
3146 && !TREE_NO_WARNING (exp)
3147 && ((tree_int_cst_lt (len, bound))
3148 || !exact))
3150 location_t warnloc
3151 = expansion_point_location_if_in_system_header (loc);
3153 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3154 exact
3155 ? G_("%K%qD specified bound %E exceeds the size %E "
3156 "of unterminated array")
3157 : G_("%K%qD specified bound %E may exceed the size "
3158 "of at most %E of unterminated array"),
3159 exp, func, bound, len))
3161 inform (DECL_SOURCE_LOCATION (lendata.decl),
3162 "referenced argument declared here");
3163 TREE_NO_WARNING (exp) = true;
3164 return NULL_RTX;
3168 if (!len)
3169 return NULL_RTX;
3171 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3172 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3175 if (TREE_CODE (bound) != SSA_NAME)
3176 return NULL_RTX;
3178 wide_int min, max;
3179 enum value_range_kind rng = get_range_info (bound, &min, &max);
3180 if (rng != VR_RANGE)
3181 return NULL_RTX;
3183 if (!TREE_NO_WARNING (exp)
3184 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3185 && warning_at (loc, OPT_Wstringop_overflow_,
3186 "%K%qD specified bound [%wu, %wu] "
3187 "exceeds maximum object size %E",
3188 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3189 TREE_NO_WARNING (exp) = true;
3191 bool exact = true;
3192 if (!len || TREE_CODE (len) != INTEGER_CST)
3194 lendata.decl = unterminated_array (src, &len, &exact);
3195 if (!lendata.decl)
3196 return NULL_RTX;
3199 if (lendata.decl
3200 && !TREE_NO_WARNING (exp)
3201 && (wi::ltu_p (wi::to_wide (len), min)
3202 || !exact))
3204 location_t warnloc
3205 = expansion_point_location_if_in_system_header (loc);
3207 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3208 exact
3209 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3210 "the size %E of unterminated array")
3211 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3212 "the size of at most %E of unterminated array"),
3213 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3215 inform (DECL_SOURCE_LOCATION (lendata.decl),
3216 "referenced argument declared here");
3217 TREE_NO_WARNING (exp) = true;
3221 if (lendata.decl)
3222 return NULL_RTX;
3224 if (wi::gtu_p (min, wi::to_wide (len)))
3225 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3227 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3228 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3231 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3232 bytes from constant string DATA + OFFSET and return it as target
3233 constant. */
3235 static rtx
3236 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3237 scalar_int_mode mode)
3239 const char *str = (const char *) data;
3241 gcc_assert (offset >= 0
3242 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3243 <= strlen (str) + 1));
3245 return c_readstr (str + offset, mode);
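/* For instance (hypothetical values, not from this file): with DATA
   pointing at "abcdef", OFFSET 2 and SImode on a little-endian target,
   the callback returns the immediate encoding the bytes "cdef", which
   store_by_pieces can emit as a single SImode store. */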
3248 /* LEN specifies the length of the block for the memcpy/memset operation.
3249 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3250 In some cases we can make a very likely guess on the maximum size,
3251 which we then set into PROBABLE_MAX_SIZE. */
3253 static void
3254 determine_block_size (tree len, rtx len_rtx,
3255 unsigned HOST_WIDE_INT *min_size,
3256 unsigned HOST_WIDE_INT *max_size,
3257 unsigned HOST_WIDE_INT *probable_max_size)
3259 if (CONST_INT_P (len_rtx))
3261 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3262 return;
3264 else
3266 wide_int min, max;
3267 enum value_range_kind range_type = VR_UNDEFINED;
3269 /* Determine bounds from the type. */
3270 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3271 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3272 else
3273 *min_size = 0;
3274 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3275 *probable_max_size = *max_size
3276 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3277 else
3278 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3280 if (TREE_CODE (len) == SSA_NAME)
3281 range_type = get_range_info (len, &min, &max);
3282 if (range_type == VR_RANGE)
3284 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3285 *min_size = min.to_uhwi ();
3286 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3287 *probable_max_size = *max_size = max.to_uhwi ();
3289 else if (range_type == VR_ANTI_RANGE)
3291 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3292 if (min == 0)
3294 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3295 *min_size = max.to_uhwi () + 1;
3297 /* Code like
3299 int n;
3300 if (n < 100)
3301 memcpy (a, b, n)
3303 produces an anti-range allowing negative values of N. We still
3304 can use the information and guess that N is not negative. */
3306 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3307 *probable_max_size = min.to_uhwi () - 1;
3310 gcc_checking_assert (*max_size <=
3311 (unsigned HOST_WIDE_INT)
3312 GET_MODE_MASK (GET_MODE (len_rtx)));
3315 /* Try to verify that the sizes and lengths of the arguments to a string
3316 manipulation function given by EXP are within valid bounds and that
3317 the operation does not lead to buffer overflow or read past the end.
3318 Arguments other than EXP may be null. When non-null, the arguments
3319 have the following meaning:
3320 DST is the destination of a copy call or NULL otherwise.
3321 SRC is the source of a copy call or NULL otherwise.
3322 DSTWRITE is the number of bytes written into the destination obtained
3323 from the user-supplied size argument to the function (such as in
3324 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3325 MAXREAD is the user-supplied bound on the length of the source sequence
3326 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3327 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3328 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3329 expression EXP is a string function call (as opposed to a memory call
3330 like memcpy). As an exception, SRCSTR can also be an integer denoting
3331 the precomputed size of the source string or object (for functions like
3332 memcpy).
3333 DSTSIZE is the size of the destination object specified by the last
3334 argument to the _chk builtins, typically resulting from the expansion
3335 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3336 DSTSIZE)).
3338 When DSTWRITE is null, MAXREAD is checked to verify that it doesn't exceed
3339 SIZE_MAX.
3341 If the call is successfully verified as safe return true, otherwise
3342 return false. */
3344 bool
3345 check_access (tree exp, tree, tree, tree dstwrite,
3346 tree maxread, tree srcstr, tree dstsize)
3348 int opt = OPT_Wstringop_overflow_;
3350 /* The size of the largest object is half the address space, or
3351 PTRDIFF_MAX. (This is way too permissive.) */
3352 tree maxobjsize = max_object_size ();
3354 /* Either the length of the source string for string functions or
3355 the size of the source object for raw memory functions. */
3356 tree slen = NULL_TREE;
3358 tree range[2] = { NULL_TREE, NULL_TREE };
3360 /* Set to true when the exact number of bytes written by a string
3361 function like strcpy is not known and the only thing that is
3362 known is that it must be at least one (for the terminating nul). */
3363 bool at_least_one = false;
3364 if (srcstr)
3366 /* SRCSTR is normally a pointer to string but as a special case
3367 it can be an integer denoting the length of a string. */
3368 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3370 /* Try to determine the range of lengths the source string
3371 refers to. If it can be determined and is less than
3372 the upper bound given by MAXREAD add one to it for
3373 the terminating nul. Otherwise, set it to one for
3374 the same reason, or to MAXREAD as appropriate. */
3375 c_strlen_data lendata = { };
3376 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3377 range[0] = lendata.minlen;
3378 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3379 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3381 if (maxread && tree_int_cst_le (maxread, range[0]))
3382 range[0] = range[1] = maxread;
3383 else
3384 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3385 range[0], size_one_node);
3387 if (maxread && tree_int_cst_le (maxread, range[1]))
3388 range[1] = maxread;
3389 else if (!integer_all_onesp (range[1]))
3390 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3391 range[1], size_one_node);
3393 slen = range[0];
3395 else
3397 at_least_one = true;
3398 slen = size_one_node;
3401 else
3402 slen = srcstr;
3405 if (!dstwrite && !maxread)
3407 /* When the only available piece of data is the object size
3408 there is nothing to do. */
3409 if (!slen)
3410 return true;
3412 /* Otherwise, when the length of the source sequence is known
3413 (as with strlen), set DSTWRITE to it. */
3414 if (!range[0])
3415 dstwrite = slen;
3418 if (!dstsize)
3419 dstsize = maxobjsize;
3421 if (dstwrite)
3422 get_size_range (dstwrite, range);
3424 tree func = get_callee_fndecl (exp);
3426 /* First check the number of bytes to be written against the maximum
3427 object size. */
3428 if (range[0]
3429 && TREE_CODE (range[0]) == INTEGER_CST
3430 && tree_int_cst_lt (maxobjsize, range[0]))
3432 if (TREE_NO_WARNING (exp))
3433 return false;
3435 location_t loc = tree_nonartificial_location (exp);
3436 loc = expansion_point_location_if_in_system_header (loc);
3438 bool warned;
3439 if (range[0] == range[1])
3440 warned = (func
3441 ? warning_at (loc, opt,
3442 "%K%qD specified size %E "
3443 "exceeds maximum object size %E",
3444 exp, func, range[0], maxobjsize)
3445 : warning_at (loc, opt,
3446 "%Kspecified size %E "
3447 "exceeds maximum object size %E",
3448 exp, range[0], maxobjsize));
3449 else
3450 warned = (func
3451 ? warning_at (loc, opt,
3452 "%K%qD specified size between %E and %E "
3453 "exceeds maximum object size %E",
3454 exp, func,
3455 range[0], range[1], maxobjsize)
3456 : warning_at (loc, opt,
3457 "%Kspecified size between %E and %E "
3458 "exceeds maximum object size %E",
3459 exp, range[0], range[1], maxobjsize));
3460 if (warned)
3461 TREE_NO_WARNING (exp) = true;
3463 return false;
3466 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3467 constant, and in range of unsigned HOST_WIDE_INT. */
3468 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3470 /* Next check the number of bytes to be written against the destination
3471 object size. */
3472 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3474 if (range[0]
3475 && TREE_CODE (range[0]) == INTEGER_CST
3476 && ((tree_fits_uhwi_p (dstsize)
3477 && tree_int_cst_lt (dstsize, range[0]))
3478 || (dstwrite
3479 && tree_fits_uhwi_p (dstwrite)
3480 && tree_int_cst_lt (dstwrite, range[0]))))
3482 if (TREE_NO_WARNING (exp))
3483 return false;
3485 location_t loc = tree_nonartificial_location (exp);
3486 loc = expansion_point_location_if_in_system_header (loc);
3488 bool warned = false;
3489 if (dstwrite == slen && at_least_one)
3491 /* This is a call to strcpy with a destination of 0 size
3492 and a source of unknown length. The call will write
3493 at least one byte past the end of the destination. */
3494 warned = (func
3495 ? warning_at (loc, opt,
3496 "%K%qD writing %E or more bytes into "
3497 "a region of size %E overflows "
3498 "the destination",
3499 exp, func, range[0], dstsize)
3500 : warning_at (loc, opt,
3501 "%Kwriting %E or more bytes into "
3502 "a region of size %E overflows "
3503 "the destination",
3504 exp, range[0], dstsize));
3506 else if (tree_int_cst_equal (range[0], range[1]))
3507 warned = (func
3508 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3509 "%K%qD writing %E byte into a region "
3510 "of size %E overflows the destination",
3511 "%K%qD writing %E bytes into a region "
3512 "of size %E overflows the destination",
3513 exp, func, range[0], dstsize)
3514 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3515 "%Kwriting %E byte into a region "
3516 "of size %E overflows the destination",
3517 "%Kwriting %E bytes into a region "
3518 "of size %E overflows the destination",
3519 exp, range[0], dstsize));
3520 else if (tree_int_cst_sign_bit (range[1]))
3522 /* Avoid printing the upper bound if it's invalid. */
3523 warned = (func
3524 ? warning_at (loc, opt,
3525 "%K%qD writing %E or more bytes into "
3526 "a region of size %E overflows "
3527 "the destination",
3528 exp, func, range[0], dstsize)
3529 : warning_at (loc, opt,
3530 "%Kwriting %E or more bytes into "
3531 "a region of size %E overflows "
3532 "the destination",
3533 exp, range[0], dstsize));
3535 else
3536 warned = (func
3537 ? warning_at (loc, opt,
3538 "%K%qD writing between %E and %E bytes "
3539 "into a region of size %E overflows "
3540 "the destination",
3541 exp, func, range[0], range[1],
3542 dstsize)
3543 : warning_at (loc, opt,
3544 "%Kwriting between %E and %E bytes "
3545 "into a region of size %E overflows "
3546 "the destination",
3547 exp, range[0], range[1],
3548 dstsize));
3549 if (warned)
3550 TREE_NO_WARNING (exp) = true;
3552 /* Return error when an overflow has been detected. */
3553 return false;
3557 /* Check the maximum length of the source sequence against the size
3558 of the destination object if known, or against the maximum size
3559 of an object. */
3560 if (maxread)
3562 get_size_range (maxread, range);
3563 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3565 location_t loc = tree_nonartificial_location (exp);
3566 loc = expansion_point_location_if_in_system_header (loc);
3568 if (tree_int_cst_lt (maxobjsize, range[0]))
3570 if (TREE_NO_WARNING (exp))
3571 return false;
3573 bool warned = false;
3575 /* Warn about crazy big sizes first since that's more
3576 likely to be meaningful than saying that the bound
3577 is greater than the object size if both are big. */
3578 if (range[0] == range[1])
3579 warned = (func
3580 ? warning_at (loc, opt,
3581 "%K%qD specified bound %E "
3582 "exceeds maximum object size %E",
3583 exp, func, range[0], maxobjsize)
3584 : warning_at (loc, opt,
3585 "%Kspecified bound %E "
3586 "exceeds maximum object size %E",
3587 exp, range[0], maxobjsize));
3588 else
3589 warned = (func
3590 ? warning_at (loc, opt,
3591 "%K%qD specified bound between "
3592 "%E and %E exceeds maximum object "
3593 "size %E",
3594 exp, func,
3595 range[0], range[1], maxobjsize)
3596 : warning_at (loc, opt,
3597 "%Kspecified bound between "
3598 "%E and %E exceeds maximum object "
3599 "size %E",
3600 exp, range[0], range[1], maxobjsize));
3601 if (warned)
3602 TREE_NO_WARNING (exp) = true;
3604 return false;
3607 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3609 if (TREE_NO_WARNING (exp))
3610 return false;
3612 bool warned = false;
3614 if (tree_int_cst_equal (range[0], range[1]))
3615 warned = (func
3616 ? warning_at (loc, opt,
3617 "%K%qD specified bound %E "
3618 "exceeds destination size %E",
3619 exp, func,
3620 range[0], dstsize)
3621 : warning_at (loc, opt,
3622 "%Kspecified bound %E "
3623 "exceeds destination size %E",
3624 exp, range[0], dstsize));
3625 else
3626 warned = (func
3627 ? warning_at (loc, opt,
3628 "%K%qD specified bound between %E "
3629 "and %E exceeds destination size %E",
3630 exp, func,
3631 range[0], range[1], dstsize)
3632 : warning_at (loc, opt,
3633 "%Kspecified bound between %E "
3634 "and %E exceeds destination size %E",
3635 exp,
3636 range[0], range[1], dstsize));
3637 if (warned)
3638 TREE_NO_WARNING (exp) = true;
3640 return false;
3645 /* Check for reading past the end of SRC. */
3646 if (slen
3647 && slen == srcstr
3648 && dstwrite && range[0]
3649 && tree_int_cst_lt (slen, range[0]))
3651 if (TREE_NO_WARNING (exp))
3652 return false;
3654 bool warned = false;
3655 location_t loc = tree_nonartificial_location (exp);
3656 loc = expansion_point_location_if_in_system_header (loc);
3658 if (tree_int_cst_equal (range[0], range[1]))
3659 warned = (func
3660 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3661 "%K%qD reading %E byte from a region of size %E",
3662 "%K%qD reading %E bytes from a region of size %E",
3663 exp, func, range[0], slen)
3664 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3665 "%Kreading %E byte from a region of size %E",
3666 "%Kreading %E bytes from a region of size %E",
3667 exp, range[0], slen));
3668 else if (tree_int_cst_sign_bit (range[1]))
3670 /* Avoid printing the upper bound if it's invalid. */
3671 warned = (func
3672 ? warning_at (loc, opt,
3673 "%K%qD reading %E or more bytes from a region "
3674 "of size %E",
3675 exp, func, range[0], slen)
3676 : warning_at (loc, opt,
3677 "%Kreading %E or more bytes from a region "
3678 "of size %E",
3679 exp, range[0], slen));
3681 else
3682 warned = (func
3683 ? warning_at (loc, opt,
3684 "%K%qD reading between %E and %E bytes from "
3685 "a region of size %E",
3686 exp, func, range[0], range[1], slen)
3687 : warning_at (loc, opt,
3688 "%Kreading between %E and %E bytes from "
3689 "a region of size %E",
3690 exp, range[0], range[1], slen));
3691 if (warned)
3692 TREE_NO_WARNING (exp) = true;
3694 return false;
3697 return true;
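/* An illustrative sketch (not from the testsuite) of a call the code
   above diagnoses:

     char d[4];
     void f (void) { __builtin_memcpy (d, "abcdefg", 8); }

   Here RANGE[0] == RANGE[1] == 8 and DSTSIZE == 4, so the branch
   above emits "writing 8 bytes into a region of size 4 overflows
   the destination" and the function returns false.  */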
3700 /* If STMT is a call to an allocation function, returns the constant
3701 size of the object allocated by the call, represented as sizetype.
3702 If nonnull, sets RNG1[] to the range of the size. */
3704 tree
3705 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3706 const vr_values *rvals /* = NULL */)
3708 if (!stmt)
3709 return NULL_TREE;
3711 tree allocfntype;
3712 if (tree fndecl = gimple_call_fndecl (stmt))
3713 allocfntype = TREE_TYPE (fndecl);
3714 else
3715 allocfntype = gimple_call_fntype (stmt);
3717 if (!allocfntype)
3718 return NULL_TREE;
3720 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3721 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3722 if (!at)
3724 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3725 return NULL_TREE;
3727 argidx1 = 0;
3730 unsigned nargs = gimple_call_num_args (stmt);
3732 if (argidx1 == UINT_MAX)
3734 tree atval = TREE_VALUE (at);
3735 if (!atval)
3736 return NULL_TREE;
3738 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3739 if (nargs <= argidx1)
3740 return NULL_TREE;
3742 atval = TREE_CHAIN (atval);
3743 if (atval)
3745 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3746 if (nargs <= argidx2)
3747 return NULL_TREE;
3751 tree size = gimple_call_arg (stmt, argidx1);
3753 wide_int rng1_buf[2];
3754 /* If RNG1 is not set, use the buffer. */
3755 if (!rng1)
3756 rng1 = rng1_buf;
3758 if (!get_range (size, rng1, rvals))
3759 return NULL_TREE;
3761 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3762 return fold_convert (sizetype, size);
3764 /* To handle ranges do the math in wide_int and return the product
3765 of the upper bounds as a constant. Ignore anti-ranges. */
3766 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3767 wide_int rng2[2];
3768 if (!get_range (n, rng2, rvals))
3769 return NULL_TREE;
3771 /* Extend to the maximum precision to avoid overflow. */
3772 const int prec = ADDR_MAX_PRECISION;
3773 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3774 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3775 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3776 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3778 /* Compute products of both bounds for the caller but return the lesser
3779 of SIZE_MAX and the product of the upper bounds as a constant. */
3780 rng1[0] = rng1[0] * rng2[0];
3781 rng1[1] = rng1[1] * rng2[1];
3782 tree size_max = TYPE_MAX_VALUE (sizetype);
3783 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3785 rng1[1] = wi::to_wide (size_max);
3786 return size_max;
3789 return wide_int_to_tree (sizetype, rng1[1]);
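/* As an illustration, for a hypothetical declaration such as

     __attribute__ ((alloc_size (1, 2))) void *my_alloc (size_t, size_t);

   a call my_alloc (n, 8) with N in the range [2, 4] sets RNG1 to
   { 16, 32 } and returns 32 as a sizetype constant, capped at
   SIZE_MAX when the product of the upper bounds would exceed it.  */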
3792 /* Helper for compute_objsize. Returns the constant size of the DEST
3793 if it refers to a variable or field and sets *PDECL to the DECL and
3794 *POFF to zero. Otherwise returns null for other nodes. */
3796 static tree
3797 addr_decl_size (tree dest, tree *pdecl, tree *poff)
3799 if (TREE_CODE (dest) == ADDR_EXPR)
3800 dest = TREE_OPERAND (dest, 0);
3802 if (DECL_P (dest))
3804 *pdecl = dest;
3805 *poff = integer_zero_node;
3806 if (tree size = DECL_SIZE_UNIT (dest))
3807 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3810 if (TREE_CODE (dest) == COMPONENT_REF)
3812 *pdecl = TREE_OPERAND (dest, 1);
3813 *poff = integer_zero_node;
3814 /* Only return constant sizes for now while callers depend on it. */
3815 if (tree size = component_ref_size (dest))
3816 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3819 return NULL_TREE;
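/* For example, for DEST of &a with a declared as char a[8], the
   result is 8 with *PDECL set to a's DECL and *POFF to zero, while
   for a pointer that is neither an address nor a member reference
   the result is null.  */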
3822 /* Helper to compute the size of the object referenced by the DEST
3823 expression which must have pointer type, using Object Size type
3824 OSTYPE (only the least significant 2 bits are used).
3825 Returns an estimate of the size of the object represented as
3826 a sizetype constant if successful or NULL when the size cannot
3827 be determined.
3828 When the referenced object involves a non-constant offset in some
3829 range the returned value represents the largest size given the
3830 smallest non-negative offset in the range.
3831 If nonnull, sets *PDECL to the decl of the referenced subobject
3832 if it can be determined, or to null otherwise. Likewise, when
3833 POFF is nonnull *POFF is set to the offset into *PDECL.
3835 The function is intended for diagnostics and should not be used
3836 to influence code generation or optimization. */
3838 tree
3839 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
3840 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
3842 tree dummy_decl = NULL_TREE;
3843 if (!pdecl)
3844 pdecl = &dummy_decl;
3846 tree dummy_off = NULL_TREE;
3847 if (!poff)
3848 poff = &dummy_off;
3850 /* Only the two least significant bits are meaningful. */
3851 ostype &= 3;
3853 if (ostype)
3854 /* Except for overly permissive calls to memcpy and other raw
3855 memory functions with zero OSTYPE, detect the size from simple
3856 DECLs first, since that sets *PDECL and *POFF more reliably
3857 than compute_builtin_object_size does. */
3858 if (tree size = addr_decl_size (dest, pdecl, poff))
3859 return size;
3861 unsigned HOST_WIDE_INT size;
3862 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
3863 return build_int_cst (sizetype, size);
3865 if (TREE_CODE (dest) == SSA_NAME)
3867 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3868 if (is_gimple_call (stmt))
3870 /* If STMT is a call to an allocation function get the size
3871 from its argument(s). If successful, also set *PDECL to
3872 DEST for the caller to include in diagnostics. */
3873 if (tree size = gimple_call_alloc_size (stmt))
3875 *pdecl = dest;
3876 *poff = integer_zero_node;
3877 return size;
3879 return NULL_TREE;
3882 if (!is_gimple_assign (stmt))
3883 return NULL_TREE;
3885 dest = gimple_assign_rhs1 (stmt);
3887 tree_code code = gimple_assign_rhs_code (stmt);
3888 if (code == POINTER_PLUS_EXPR)
3890 /* compute_builtin_object_size fails for addresses with
3891 non-constant offsets. Try to determine the range of
3892 such an offset here and use it to adjust the constant
3893 size. */
3894 tree off = gimple_assign_rhs2 (stmt);
3895 if (TREE_CODE (off) == INTEGER_CST)
3897 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3899 wide_int wioff = wi::to_wide (off);
3900 wide_int wisiz = wi::to_wide (size);
3902 /* Ignore negative offsets for now. For others,
3903 use the lower bound as the most optimistic
3904 estimate of the (remaining) size. */
3905 if (wi::neg_p (wioff))
3907 else
3909 if (*poff)
3911 *poff = fold_convert (ptrdiff_type_node, *poff);
3912 off = fold_convert (ptrdiff_type_node, off);
3913 *poff = size_binop (PLUS_EXPR, *poff, off);
3915 else
3916 *poff = off;
3917 if (wi::ltu_p (wioff, wisiz))
3918 return wide_int_to_tree (TREE_TYPE (size),
3919 wi::sub (wisiz, wioff));
3920 return size_zero_node;
3924 else if (TREE_CODE (off) == SSA_NAME
3925 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3927 wide_int min, max;
3928 enum value_range_kind rng = get_range_info (off, &min, &max);
3930 if (rng == VR_RANGE)
3931 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3933 wide_int wisiz = wi::to_wide (size);
3935 /* Ignore negative offsets for now. For others,
3936 use the lower bound as the most optimistic
3937 estimate of the (remaining) size. */
3938 if (wi::neg_p (min) || wi::neg_p (max))
3940 else
3942 /* FIXME: For now, since the offset is non-constant,
3943 clear *POFF to keep it from being "misused."
3944 Eventually *POFF will need to become a range that
3945 can be properly added to the outer offset if it
3946 too is one. */
3947 *poff = NULL_TREE;
3948 if (wi::ltu_p (min, wisiz))
3949 return wide_int_to_tree (TREE_TYPE (size),
3950 wi::sub (wisiz, min));
3951 return size_zero_node;
3956 else if (code != ADDR_EXPR)
3957 return NULL_TREE;
3960 /* Unless computing the largest size (for memcpy and other raw memory
3961 functions), try to determine the size of the object from its type. */
3962 if (!ostype)
3963 return NULL_TREE;
3965 if (TREE_CODE (dest) == ARRAY_REF
3966 || TREE_CODE (dest) == MEM_REF)
3968 tree ref = TREE_OPERAND (dest, 0);
3969 tree reftype = TREE_TYPE (ref);
3970 if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
3972 /* Give up for MEM_REFs of vector types; those may be synthesized
3973 from multiple assignments to consecutive data members. See PR
3974 93200.
3975 FIXME: Deal with this more generally, e.g., by marking up such
3976 MEM_REFs at the time they're created. */
3977 reftype = TREE_TYPE (reftype);
3978 if (TREE_CODE (reftype) == VECTOR_TYPE)
3979 return NULL_TREE;
3981 tree off = TREE_OPERAND (dest, 1);
3982 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
3984 /* If the declaration of the destination object is known
3985 to have zero size, return zero. */
3986 if (integer_zerop (size)
3987 && *pdecl && DECL_P (*pdecl)
3988 && *poff && integer_zerop (*poff))
3989 return size_zero_node;
3991 /* A valid offset into a declared object cannot be negative.
3992 A zero size with a zero "inner" offset is still zero size
3993 regardless of the "other" offset OFF. */
3994 if (*poff
3995 && ((integer_zerop (*poff) && integer_zerop (size))
3996 || (TREE_CODE (*poff) == INTEGER_CST
3997 && tree_int_cst_sgn (*poff) < 0)))
3998 return size_zero_node;
4000 wide_int offrng[2];
4001 if (!get_range (off, offrng, rvals))
4002 return NULL_TREE;
4004 /* Convert to the same precision to keep wide_int from "helpfully"
4005 crashing whenever it sees other arguments. */
4006 const unsigned sizprec = TYPE_PRECISION (sizetype);
4007 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4008 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4010 /* Adjust SIZE either up or down by the sum of *POFF and OFF
4011 above. */
4012 if (TREE_CODE (dest) == ARRAY_REF)
4014 tree lowbnd = array_ref_low_bound (dest);
4015 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4017 /* Adjust the offset by the low bound of the array
4018 domain (normally zero but 1 in Fortran). */
4019 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4020 offrng[0] -= lb;
4021 offrng[1] -= lb;
4024 /* Convert the array index into a byte offset. */
4025 tree eltype = TREE_TYPE (dest);
4026 tree tpsize = TYPE_SIZE_UNIT (eltype);
4027 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
4029 wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4030 offrng[0] *= wsz;
4031 offrng[1] *= wsz;
4033 else
4034 return NULL_TREE;
4037 wide_int wisize = wi::to_wide (size);
4039 if (!*poff)
4041 /* If the "inner" offset is unknown and the "outer" offset
4042 is either negative or less than SIZE, return the size
4043 minus the offset. This may be overly optimistic in
4044 the first case if the inner offset happens to be less
4045 than the absolute value of the outer offset. */
4046 if (wi::neg_p (offrng[0]))
4047 return size;
4048 if (wi::ltu_p (offrng[0], wisize))
4049 return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4050 return size_zero_node;
4053 /* Convert to the same precision to keep wide_int from "helpfully"
4054 crashing whenever it sees other arguments. */
4055 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4056 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4058 tree dstoff = *poff;
4059 if (integer_zerop (*poff))
4060 *poff = off;
4061 else if (!integer_zerop (off))
4063 *poff = fold_convert (ptrdiff_type_node, *poff);
4064 off = fold_convert (ptrdiff_type_node, off);
4065 *poff = size_binop (PLUS_EXPR, *poff, off);
4068 if (!wi::neg_p (offrng[0]))
4070 if (TREE_CODE (size) != INTEGER_CST)
4071 return NULL_TREE;
4073 /* Return the difference between the size and the offset
4074 or zero if the offset is greater. */
4075 wide_int wisize = wi::to_wide (size, sizprec);
4076 if (wi::ltu_p (wisize, offrng[0]))
4077 return size_zero_node;
4079 return wide_int_to_tree (sizetype, wisize - offrng[0]);
4082 wide_int dstoffrng[2];
4083 if (TREE_CODE (dstoff) == INTEGER_CST)
4084 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4085 else if (TREE_CODE (dstoff) == SSA_NAME)
4087 enum value_range_kind rng
4088 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4089 if (rng != VR_RANGE)
4090 return NULL_TREE;
4092 else
4093 return NULL_TREE;
4095 dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4096 dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
4098 if (!wi::neg_p (dstoffrng[0]))
4099 wisize += dstoffrng[0];
4101 offrng[1] += dstoffrng[1];
4102 if (wi::neg_p (offrng[1]))
4103 return size_zero_node;
4105 return wide_int_to_tree (sizetype, wisize);
4108 return NULL_TREE;
4111 /* Try simple DECLs not handled above. */
4112 if (tree size = addr_decl_size (dest, pdecl, poff))
4113 return size;
4115 tree type = TREE_TYPE (dest);
4116 if (TREE_CODE (type) == POINTER_TYPE)
4117 type = TREE_TYPE (type);
4119 type = TYPE_MAIN_VARIANT (type);
4120 if (TREE_CODE (dest) == ADDR_EXPR)
4121 dest = TREE_OPERAND (dest, 0);
4123 if (TREE_CODE (type) == ARRAY_TYPE
4124 && !array_at_struct_end_p (dest))
4126 if (tree size = TYPE_SIZE_UNIT (type))
4127 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
4130 return NULL_TREE;
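/* A sketch of the POINTER_PLUS handling above: for

     char a[10];
     char *p = a + i;

   with I an SSA_NAME whose value range is [2, 6], compute_objsize (p, 1)
   returns 8 (10 - 2), using the lower bound of the offset as the most
   optimistic estimate of the remaining size, and clears *POFF because
   the offset is non-constant.  */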
4133 /* Helper to determine and check the sizes of the source and the destination
4134 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
4135 call expression, DEST is the destination argument, SRC is the source
4136 argument or null, and SIZE is the number of bytes. Use Object Size type-0
4137 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4138 (no overflow or invalid sizes), false otherwise. */
4140 static bool
4141 check_memop_access (tree exp, tree dest, tree src, tree size)
4143 /* For functions like memset and memcpy that operate on raw memory
4144 try to determine the size of the largest source and destination
4145 object using type-0 Object Size regardless of the object size
4146 type specified by the option. */
4147 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4148 tree dstsize = compute_objsize (dest, 0);
4150 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4151 srcsize, dstsize);
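/* An illustrative consequence of using type-0 Object Size: for

     struct { char a[4]; char b[4]; } s;

   a call like memset (&s.a, 0, 8) is not diagnosed because DSTSIZE
   is the 8 bytes of the whole object rather than the 4 bytes of
   the member.  */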
4154 /* Validate memchr arguments without performing any expansion.
4155 Return NULL_RTX. */
4157 static rtx
4158 expand_builtin_memchr (tree exp, rtx)
4160 if (!validate_arglist (exp,
4161 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4162 return NULL_RTX;
4164 tree arg1 = CALL_EXPR_ARG (exp, 0);
4165 tree len = CALL_EXPR_ARG (exp, 2);
4167 /* Diagnose calls where the specified length exceeds the size
4168 of the object. */
4169 if (warn_stringop_overflow)
4171 tree size = compute_objsize (arg1, 0);
4172 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4173 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4176 return NULL_RTX;
4179 /* Expand a call EXP to the memcpy builtin.
4180 Return NULL_RTX if we failed; the caller should emit a normal call,
4181 otherwise try to get the result in TARGET, if convenient (and in
4182 mode MODE if that's convenient). */
4184 static rtx
4185 expand_builtin_memcpy (tree exp, rtx target)
4187 if (!validate_arglist (exp,
4188 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4189 return NULL_RTX;
4191 tree dest = CALL_EXPR_ARG (exp, 0);
4192 tree src = CALL_EXPR_ARG (exp, 1);
4193 tree len = CALL_EXPR_ARG (exp, 2);
4195 check_memop_access (exp, dest, src, len);
4197 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4198 /*retmode=*/ RETURN_BEGIN, false);
4201 /* Check a call EXP to the memmove built-in for validity.
4202 Return NULL_RTX on both success and failure. */
4204 static rtx
4205 expand_builtin_memmove (tree exp, rtx target)
4207 if (!validate_arglist (exp,
4208 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4209 return NULL_RTX;
4211 tree dest = CALL_EXPR_ARG (exp, 0);
4212 tree src = CALL_EXPR_ARG (exp, 1);
4213 tree len = CALL_EXPR_ARG (exp, 2);
4215 check_memop_access (exp, dest, src, len);
4217 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4218 /*retmode=*/ RETURN_BEGIN, true);
4221 /* Expand a call EXP to the mempcpy builtin.
4222 Return NULL_RTX if we failed; the caller should emit a normal call,
4223 otherwise try to get the result in TARGET, if convenient (and in
4224 mode MODE if that's convenient). */
4226 static rtx
4227 expand_builtin_mempcpy (tree exp, rtx target)
4229 if (!validate_arglist (exp,
4230 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4231 return NULL_RTX;
4233 tree dest = CALL_EXPR_ARG (exp, 0);
4234 tree src = CALL_EXPR_ARG (exp, 1);
4235 tree len = CALL_EXPR_ARG (exp, 2);
4237 /* Policy does not generally allow using compute_objsize (which
4238 is used internally by check_memop_access) to change code generation
4239 or drive optimization decisions.
4241 In this instance it is safe because the code we generate has
4242 the same semantics regardless of the return value of
4243 check_memop_access. Exactly the same amount of data is copied
4244 and the return value is exactly the same in both cases.
4246 Furthermore, check_memop_access always uses mode 0 for the call to
4247 compute_objsize, so the imprecise nature of compute_objsize is
4248 avoided. */
4250 /* Avoid expanding mempcpy into memcpy when the call is determined
4251 to overflow the buffer. This also prevents the same overflow
4252 from being diagnosed again when expanding memcpy. */
4253 if (!check_memop_access (exp, dest, src, len))
4254 return NULL_RTX;
4256 return expand_builtin_mempcpy_args (dest, src, len,
4257 target, exp, /*retmode=*/ RETURN_END);
4260 /* Helper function to do the actual work for expand of memory copy family
4261 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN
4262 bytes of memory from SRC to DEST and assign the result to TARGET
4263 if convenient. The return value is based on the RETMODE argument. */
4265 static rtx
4266 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4267 rtx target, tree exp, memop_ret retmode,
4268 bool might_overlap)
4270 const char *src_str;
4271 unsigned int src_align = get_pointer_alignment (src);
4272 unsigned int dest_align = get_pointer_alignment (dest);
4273 rtx dest_mem, src_mem, dest_addr, len_rtx;
4274 HOST_WIDE_INT expected_size = -1;
4275 unsigned int expected_align = 0;
4276 unsigned HOST_WIDE_INT min_size;
4277 unsigned HOST_WIDE_INT max_size;
4278 unsigned HOST_WIDE_INT probable_max_size;
4280 bool is_move_done;
4282 /* If DEST is not a pointer type, call the normal function. */
4283 if (dest_align == 0)
4284 return NULL_RTX;
4286 /* If either SRC is not a pointer type, don't do this
4287 operation in-line. */
4288 if (src_align == 0)
4289 return NULL_RTX;
4291 if (currently_expanding_gimple_stmt)
4292 stringop_block_profile (currently_expanding_gimple_stmt,
4293 &expected_align, &expected_size);
4295 if (expected_align < dest_align)
4296 expected_align = dest_align;
4297 dest_mem = get_memory_rtx (dest, len);
4298 set_mem_align (dest_mem, dest_align);
4299 len_rtx = expand_normal (len);
4300 determine_block_size (len, len_rtx, &min_size, &max_size,
4301 &probable_max_size);
4302 src_str = c_getstr (src);
4304 /* If SRC is a string constant and block move would be done by
4305 pieces, we can avoid loading the string from memory and only
4306 store the computed constants. This works in the overlap
4307 (memmove) case as well because store_by_pieces just generates a
4308 series of stores of constants from the string constant returned
4309 by c_getstr(). */
4310 if (src_str
4311 && CONST_INT_P (len_rtx)
4312 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
4313 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4314 CONST_CAST (char *, src_str),
4315 dest_align, false))
4317 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4318 builtin_memcpy_read_str,
4319 CONST_CAST (char *, src_str),
4320 dest_align, false, retmode);
4321 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4322 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4323 return dest_mem;
4326 src_mem = get_memory_rtx (src, len);
4327 set_mem_align (src_mem, src_align);
4329 /* Copy word part most expediently. */
4330 enum block_op_methods method = BLOCK_OP_NORMAL;
4331 if (CALL_EXPR_TAILCALL (exp)
4332 && (retmode == RETURN_BEGIN || target == const0_rtx))
4333 method = BLOCK_OP_TAILCALL;
4334 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4335 && retmode == RETURN_END
4336 && !might_overlap
4337 && target != const0_rtx);
4338 if (use_mempcpy_call)
4339 method = BLOCK_OP_NO_LIBCALL_RET;
4340 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4341 expected_align, expected_size,
4342 min_size, max_size, probable_max_size,
4343 use_mempcpy_call, &is_move_done, might_overlap);
4345 /* Bail out when a mempcpy call would be expanded as a libcall and
4346 the target provides a fast implementation
4347 of the mempcpy routine. */
4348 if (!is_move_done)
4349 return NULL_RTX;
4351 if (dest_addr == pc_rtx)
4352 return NULL_RTX;
4354 if (dest_addr == 0)
4356 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4357 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4360 if (retmode != RETURN_BEGIN && target != const0_rtx)
4362 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4363 /* stpcpy pointer to last byte. */
4364 if (retmode == RETURN_END_MINUS_ONE)
4365 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4368 return dest_addr;
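/* A sketch of the constant-string shortcut above: for
   memcpy (d, "hi", 3) with a suitably aligned D, store_by_pieces
   emits the three constant bytes 'h', 'i', and '\0' directly,
   never loading the string constant from memory.  */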
4371 static rtx
4372 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4373 rtx target, tree orig_exp, memop_ret retmode)
4375 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4376 retmode, false);
4379 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4380 we failed; the caller should emit a normal call. Otherwise try to
4381 get the result in TARGET, if convenient.
4382 The return value is based on the RETMODE argument. */
4384 static rtx
4385 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4387 class expand_operand ops[3];
4388 rtx dest_mem;
4389 rtx src_mem;
4391 if (!targetm.have_movstr ())
4392 return NULL_RTX;
4394 dest_mem = get_memory_rtx (dest, NULL);
4395 src_mem = get_memory_rtx (src, NULL);
4396 if (retmode == RETURN_BEGIN)
4398 target = force_reg (Pmode, XEXP (dest_mem, 0));
4399 dest_mem = replace_equiv_address (dest_mem, target);
4402 create_output_operand (&ops[0],
4403 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4404 create_fixed_operand (&ops[1], dest_mem);
4405 create_fixed_operand (&ops[2], src_mem);
4406 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4407 return NULL_RTX;
4409 if (retmode != RETURN_BEGIN && target != const0_rtx)
4411 target = ops[0].value;
4412 /* movstr is supposed to set end to the address of the NUL
4413 terminator. If the caller requested a mempcpy-like return value,
4414 adjust it. */
4415 if (retmode == RETURN_END)
4417 rtx tem = plus_constant (GET_MODE (target),
4418 gen_lowpart (GET_MODE (target), target), 1);
4419 emit_move_insn (target, force_operand (tem, NULL_RTX));
4422 return target;
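/* To illustrate the RETMODE handling: for a source of "ab", movstr
   leaves the result pointing at &d[2], the copied nul.  RETURN_BEGIN
   yields D itself, RETURN_END_MINUS_ONE yields &d[2] (the stpcpy
   result), and RETURN_END yields &d[3], one past the nul, hence the
   adjustment by 1 above.  */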
4425 /* Do some very basic size validation of a call to the strcat builtin
4426 given by EXP. Return NULL_RTX to have the built-in expand to a call
4427 to the library function. */
4429 static rtx
4430 expand_builtin_strcat (tree exp)
4432 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4433 || !warn_stringop_overflow)
4434 return NULL_RTX;
4436 tree dest = CALL_EXPR_ARG (exp, 0);
4437 tree src = CALL_EXPR_ARG (exp, 1);
4439 /* Detect unterminated source (only). */
4440 if (!check_nul_terminated_array (exp, src))
4441 return NULL_RTX;
4443 /* There is no way here to determine the length of the string in
4444 the destination to which the SRC string is being appended so
4445 just diagnose cases when the source string is longer than
4446 the destination object. */
4448 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4450 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4451 destsize);
4453 return NULL_RTX;
4456 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4457 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4458 try to get the result in TARGET, if convenient (and in mode MODE if that's
4459 convenient). */
4461 static rtx
4462 expand_builtin_strcpy (tree exp, rtx target)
4464 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4465 return NULL_RTX;
4467 tree dest = CALL_EXPR_ARG (exp, 0);
4468 tree src = CALL_EXPR_ARG (exp, 1);
4470 if (warn_stringop_overflow)
4472 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4473 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4474 src, destsize);
4477 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4479 /* Check to see if the argument was declared attribute nonstring
4480 and if so, issue a warning since at this point it's not known
4481 to be nul-terminated. */
4482 tree fndecl = get_callee_fndecl (exp);
4483 maybe_warn_nonstring_arg (fndecl, exp);
4484 return ret;
4487 return NULL_RTX;
4490 /* Helper function to do the actual work for expand_builtin_strcpy. The
4491 arguments to the builtin_strcpy call DEST and SRC are broken out
4492 so that this can also be called without constructing an actual CALL_EXPR.
4493 The other arguments and return value are the same as for
4494 expand_builtin_strcpy. */
4496 static rtx
4497 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4499 /* Detect strcpy calls with unterminated arrays. */
4500 if (tree nonstr = unterminated_array (src))
4502 /* NONSTR refers to the non-nul terminated constant array. */
4503 if (!TREE_NO_WARNING (exp))
4504 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4505 return NULL_RTX;
4508 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4511 /* Expand a call EXP to the stpcpy builtin.
4512 Return NULL_RTX if we failed; the caller should emit a normal call,
4513 otherwise try to get the result in TARGET, if convenient (and in
4514 mode MODE if that's convenient). */
4516 static rtx
4517 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4519 tree dst, src;
4520 location_t loc = EXPR_LOCATION (exp);
4522 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4523 return NULL_RTX;
4525 dst = CALL_EXPR_ARG (exp, 0);
4526 src = CALL_EXPR_ARG (exp, 1);
4528 if (warn_stringop_overflow)
4530 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4531 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4532 src, destsize);
4535 /* If return value is ignored, transform stpcpy into strcpy. */
4536 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4538 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4539 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4540 return expand_expr (result, target, mode, EXPAND_NORMAL);
4542 else
4544 tree len, lenp1;
4545 rtx ret;
4547 /* Ensure we get an actual string whose length can be evaluated at
4548 compile-time, not an expression containing a string. This is
4549 because the latter will potentially produce pessimized code
4550 when used to produce the return value. */
4551 c_strlen_data lendata = { };
4552 if (!c_getstr (src, NULL)
4553 || !(len = c_strlen (src, 0, &lendata, 1)))
4554 return expand_movstr (dst, src, target,
4555 /*retmode=*/ RETURN_END_MINUS_ONE);
4557 if (lendata.decl && !TREE_NO_WARNING (exp))
4558 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4560 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4561 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4562 target, exp,
4563 /*retmode=*/ RETURN_END_MINUS_ONE);
4565 if (ret)
4566 return ret;
4568 if (TREE_CODE (len) == INTEGER_CST)
4570 rtx len_rtx = expand_normal (len);
4572 if (CONST_INT_P (len_rtx))
4574 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4576 if (ret)
4578 if (! target)
4580 if (mode != VOIDmode)
4581 target = gen_reg_rtx (mode);
4582 else
4583 target = gen_reg_rtx (GET_MODE (ret));
4585 if (GET_MODE (target) != GET_MODE (ret))
4586 ret = gen_lowpart (GET_MODE (target), ret);
4588 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4589 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4590 gcc_assert (ret);
4592 return target;
4597 return expand_movstr (dst, src, target,
4598 /*retmode=*/ RETURN_END_MINUS_ONE);
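/* A concrete instance of the constant-length path above:
   stpcpy (d, "abc") has LEN == 3 and LENP1 == 4 and so expands as
   mempcpy (d, "abc", 4) with RETURN_END_MINUS_ONE, yielding d + 3,
   the address of the copied nul.  */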
4602 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4603 arguments while being careful to avoid duplicate warnings (which could
4604 be issued if the expander were to expand the call, resulting in it
4605 being emitted in expand_call ()). */
4607 static rtx
4608 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4610 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4612 /* The call has been successfully expanded. Check for nonstring
4613 arguments and issue warnings as appropriate. */
4614 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4615 return ret;
4618 return NULL_RTX;
4621 /* Check a call EXP to the stpncpy built-in for validity.
4622 Return NULL_RTX on both success and failure. */
4624 static rtx
4625 expand_builtin_stpncpy (tree exp, rtx)
4627 if (!validate_arglist (exp,
4628 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4629 || !warn_stringop_overflow)
4630 return NULL_RTX;
4632 /* The source and destination of the call. */
4633 tree dest = CALL_EXPR_ARG (exp, 0);
4634 tree src = CALL_EXPR_ARG (exp, 1);
4636 /* The exact number of bytes to write (not the maximum). */
4637 tree len = CALL_EXPR_ARG (exp, 2);
4638 if (!check_nul_terminated_array (exp, src, len))
4639 return NULL_RTX;
4641 /* The size of the destination object. */
4642 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4644 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4646 return NULL_RTX;
4649 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4650 bytes from constant string DATA + OFFSET and return it as target
4651 constant. */
4653 static rtx
4654 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4655 scalar_int_mode mode)
4657 const char *str = (const char *) data;
4659 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4660 return const0_rtx;
4662 return c_readstr (str + offset, mode);
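/* Returning const0_rtx past the end of STR implements the zero
   padding strncpy requires: e.g. for DATA of "ab", an offset within
   the string reads its bytes (zero-filled past the nul) and any
   offset beyond it yields all zeros.  */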
4665 /* Helper to check the sizes of sequences and the destination of calls
4666 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4667 success (no overflow or invalid sizes), false otherwise. */
4669 static bool
4670 check_strncat_sizes (tree exp, tree objsize)
4672 tree dest = CALL_EXPR_ARG (exp, 0);
4673 tree src = CALL_EXPR_ARG (exp, 1);
4674 tree maxread = CALL_EXPR_ARG (exp, 2);
4676 /* Try to determine the range of lengths that the source expression
4677 refers to. */
4678 c_strlen_data lendata = { };
4679 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4681 /* Try to verify that the destination is big enough for the shortest
4682 string. */
4684 if (!objsize && warn_stringop_overflow)
4686 /* If it hasn't been provided by __strncat_chk, try to determine
4687 the size of the destination object into which the source is
4688 being copied. */
4689 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4692 /* Add one for the terminating nul. */
4693 tree srclen = (lendata.minlen
4694 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4695 size_one_node)
4696 : NULL_TREE);
4698 /* The strncat function copies at most MAXREAD bytes and always appends
4699 the terminating nul so the specified upper bound should never be equal
4700 to (or greater than) the size of the destination. */
4701 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4702 && tree_int_cst_equal (objsize, maxread))
4704 location_t loc = tree_nonartificial_location (exp);
4705 loc = expansion_point_location_if_in_system_header (loc);
4707 warning_at (loc, OPT_Wstringop_overflow_,
4708 "%K%qD specified bound %E equals destination size",
4709 exp, get_callee_fndecl (exp), maxread);
4711 return false;
4714 if (!srclen
4715 || (maxread && tree_fits_uhwi_p (maxread)
4716 && tree_fits_uhwi_p (srclen)
4717 && tree_int_cst_lt (maxread, srclen)))
4718 srclen = maxread;
4720 /* The number of bytes to write is bounded by MAXREAD, but check_access
4721 will also check SRCLEN if MAXREAD's value isn't known. */
4722 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4723 objsize);
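/* For example, a call like

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed above because the bound equals the destination size,
   leaving no room for the terminating nul when S is long enough.  */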
4726 /* Similar to expand_builtin_strcat, do some very basic size validation
4727 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4728 the built-in expand to a call to the library function. */
4730 static rtx
4731 expand_builtin_strncat (tree exp, rtx)
4733 if (!validate_arglist (exp,
4734 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4735 || !warn_stringop_overflow)
4736 return NULL_RTX;
4738 tree dest = CALL_EXPR_ARG (exp, 0);
4739 tree src = CALL_EXPR_ARG (exp, 1);
4740 /* The upper bound on the number of bytes to write. */
4741 tree maxread = CALL_EXPR_ARG (exp, 2);
4743 /* Detect unterminated source (only). */
4744 if (!check_nul_terminated_array (exp, src, maxread))
4745 return NULL_RTX;
4747 /* The length of the source sequence. */
4748 tree slen = c_strlen (src, 1);
4750 /* Try to determine the range of lengths that the source expression
4751 refers to. Since the lengths are only used for warning and not
4752 for code generation disable strict mode below. */
4753 tree maxlen = slen;
4754 if (!maxlen)
4756 c_strlen_data lendata = { };
4757 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4758 maxlen = lendata.maxbound;
4761 /* Try to verify that the destination is big enough for the shortest
4762 string. First try to determine the size of the destination object
4763 into which the source is being copied. */
4764 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4766 /* Add one for the terminating nul. */
4767 tree srclen = (maxlen
4768 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4769 size_one_node)
4770 : NULL_TREE);
4772 /* The strncat function copies at most MAXREAD bytes and always appends
4773 the terminating nul so the specified upper bound should never be equal
4774 to (or greater than) the size of the destination. */
4775 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4776 && tree_int_cst_equal (destsize, maxread))
4778 location_t loc = tree_nonartificial_location (exp);
4779 loc = expansion_point_location_if_in_system_header (loc);
4781 warning_at (loc, OPT_Wstringop_overflow_,
4782 "%K%qD specified bound %E equals destination size",
4783 exp, get_callee_fndecl (exp), maxread);
4785 return NULL_RTX;
4788 if (!srclen
4789 || (maxread && tree_fits_uhwi_p (maxread)
4790 && tree_fits_uhwi_p (srclen)
4791 && tree_int_cst_lt (maxread, srclen)))
4792 srclen = maxread;
4794 /* The number of bytes to write is SRCLEN. */
4795 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4797 return NULL_RTX;
4800 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4801 NULL_RTX if we failed; the caller should emit a normal call. */
4803 static rtx
4804 expand_builtin_strncpy (tree exp, rtx target)
4806 location_t loc = EXPR_LOCATION (exp);
4808 if (!validate_arglist (exp,
4809 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4810 return NULL_RTX;
4811 tree dest = CALL_EXPR_ARG (exp, 0);
4812 tree src = CALL_EXPR_ARG (exp, 1);
4813 /* The number of bytes to write (not the maximum). */
4814 tree len = CALL_EXPR_ARG (exp, 2);
4816 if (!check_nul_terminated_array (exp, src, len))
4817 return NULL_RTX;
4819 /* The length of the source sequence. */
4820 tree slen = c_strlen (src, 1);
4822 if (warn_stringop_overflow)
4824 tree destsize = compute_objsize (dest,
4825 warn_stringop_overflow - 1);
4827 /* The number of bytes to write is LEN but check_access will also
4828 check SLEN if LEN's value isn't known. */
4829 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4830 destsize);
4833 /* We must be passed a constant len and src parameter. */
4834 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4835 return NULL_RTX;
4837 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4839 /* We're required to pad with trailing zeros if the requested
4840 len is greater than strlen(s2)+1. In that case try to
4841 use store_by_pieces, if it fails, punt. */
4842 if (tree_int_cst_lt (slen, len))
4844 unsigned int dest_align = get_pointer_alignment (dest);
4845 const char *p = c_getstr (src);
4846 rtx dest_mem;
4848 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4849 || !can_store_by_pieces (tree_to_uhwi (len),
4850 builtin_strncpy_read_str,
4851 CONST_CAST (char *, p),
4852 dest_align, false))
4853 return NULL_RTX;
4855 dest_mem = get_memory_rtx (dest, len);
4856 store_by_pieces (dest_mem, tree_to_uhwi (len),
4857 builtin_strncpy_read_str,
4858 CONST_CAST (char *, p), dest_align, false,
4859 RETURN_BEGIN);
4860 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4861 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4862 return dest_mem;
4865 return NULL_RTX;
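/* As an example of the padding path above, strncpy (d, "ab", 5) has
   SLEN == 3 (including the nul), which is less than LEN == 5, so
   store_by_pieces emits stores of 'a', 'b', and three zeros, with
   builtin_strncpy_read_str supplying the trailing zeros.  */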
4868 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4869 bytes from constant string DATA + OFFSET and return it as target
4870 constant. */
4872 static rtx
4873 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4874 scalar_int_mode mode)
4876 const char *c = (const char *) data;
4877 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4879 memset (p, *c, GET_MODE_SIZE (mode));
4881 return c_readstr (p, mode);
4884 /* Callback routine for store_by_pieces. Return the RTL of a register
4885 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4886 char value given in the RTL register data. For example, if mode is
4887 4 bytes wide, return the RTL for 0x01010101*data. */
4889 static rtx
4890 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4891 scalar_int_mode mode)
4893 rtx target, coeff;
4894 size_t size;
4895 char *p;
4897 size = GET_MODE_SIZE (mode);
4898 if (size == 1)
4899 return (rtx) data;
4901 p = XALLOCAVEC (char, size);
4902 memset (p, 1, size);
4903 coeff = c_readstr (p, mode);
4905 target = convert_to_mode (mode, (rtx) data, 1);
4906 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4907 return force_reg (mode, target);
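/* E.g. replicating the byte 0x2a into SImode multiplies the
   zero-extended value by the 0x01010101 coefficient read from the
   all-ones byte string, yielding 0x2a2a2a2a.  */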
4910 /* Expand expression EXP, which is a call to the memset builtin. Return
4911 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4912 try to get the result in TARGET, if convenient (and in mode MODE if that's
4913 convenient). */
4915 static rtx
4916 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4918 if (!validate_arglist (exp,
4919 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4920 return NULL_RTX;
4922 tree dest = CALL_EXPR_ARG (exp, 0);
4923 tree val = CALL_EXPR_ARG (exp, 1);
4924 tree len = CALL_EXPR_ARG (exp, 2);
4926 check_memop_access (exp, dest, NULL_TREE, len);
4928 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4931 /* Helper function to do the actual work for expand_builtin_memset. The
4932 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4933 so that this can also be called without constructing an actual CALL_EXPR.
4934 The other arguments and return value are the same as for
4935 expand_builtin_memset. */
4937 static rtx
4938 expand_builtin_memset_args (tree dest, tree val, tree len,
4939 rtx target, machine_mode mode, tree orig_exp)
4941 tree fndecl, fn;
4942 enum built_in_function fcode;
4943 machine_mode val_mode;
4944 char c;
4945 unsigned int dest_align;
4946 rtx dest_mem, dest_addr, len_rtx;
4947 HOST_WIDE_INT expected_size = -1;
4948 unsigned int expected_align = 0;
4949 unsigned HOST_WIDE_INT min_size;
4950 unsigned HOST_WIDE_INT max_size;
4951 unsigned HOST_WIDE_INT probable_max_size;
4953 dest_align = get_pointer_alignment (dest);
4955 /* If DEST is not a pointer type, don't do this operation in-line. */
4956 if (dest_align == 0)
4957 return NULL_RTX;
4959 if (currently_expanding_gimple_stmt)
4960 stringop_block_profile (currently_expanding_gimple_stmt,
4961 &expected_align, &expected_size);
4963 if (expected_align < dest_align)
4964 expected_align = dest_align;
4966 /* If the LEN parameter is zero, return DEST. */
4967 if (integer_zerop (len))
4969 /* Evaluate and ignore VAL in case it has side-effects. */
4970 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4971 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4974 /* Stabilize the arguments in case we fail. */
4975 dest = builtin_save_expr (dest);
4976 val = builtin_save_expr (val);
4977 len = builtin_save_expr (len);
4979 len_rtx = expand_normal (len);
4980 determine_block_size (len, len_rtx, &min_size, &max_size,
4981 &probable_max_size);
4982 dest_mem = get_memory_rtx (dest, len);
4983 val_mode = TYPE_MODE (unsigned_char_type_node);
4985 if (TREE_CODE (val) != INTEGER_CST)
4987 rtx val_rtx;
4989 val_rtx = expand_normal (val);
4990 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4992 /* Assume that we can memset by pieces if we can store
4993 the coefficients by pieces (in the required modes).
4994 We can't pass builtin_memset_gen_str as that emits RTL. */
4995 c = 1;
4996 if (tree_fits_uhwi_p (len)
4997 && can_store_by_pieces (tree_to_uhwi (len),
4998 builtin_memset_read_str, &c, dest_align,
4999 true))
5001 val_rtx = force_reg (val_mode, val_rtx);
5002 store_by_pieces (dest_mem, tree_to_uhwi (len),
5003 builtin_memset_gen_str, val_rtx, dest_align,
5004 true, RETURN_BEGIN);
5006 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5007 dest_align, expected_align,
5008 expected_size, min_size, max_size,
5009 probable_max_size))
5010 goto do_libcall;
5012 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5013 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5014 return dest_mem;
5017 if (target_char_cast (val, &c))
5018 goto do_libcall;
5020 if (c)
5022 if (tree_fits_uhwi_p (len)
5023 && can_store_by_pieces (tree_to_uhwi (len),
5024 builtin_memset_read_str, &c, dest_align,
5025 true))
5026 store_by_pieces (dest_mem, tree_to_uhwi (len),
5027 builtin_memset_read_str, &c, dest_align, true,
5028 RETURN_BEGIN);
5029 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5030 gen_int_mode (c, val_mode),
5031 dest_align, expected_align,
5032 expected_size, min_size, max_size,
5033 probable_max_size))
5034 goto do_libcall;
5036 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5038 return dest_mem;
5041 set_mem_align (dest_mem, dest_align);
5042 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5043 CALL_EXPR_TAILCALL (orig_exp)
5044 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5045 expected_align, expected_size,
5046 min_size, max_size,
5047 probable_max_size);
5049 if (dest_addr == 0)
5051 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5052 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5055 return dest_addr;
5057 do_libcall:
5058 fndecl = get_callee_fndecl (orig_exp);
5059 fcode = DECL_FUNCTION_CODE (fndecl);
5060 if (fcode == BUILT_IN_MEMSET)
5061 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5062 dest, val, len);
5063 else if (fcode == BUILT_IN_BZERO)
5064 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5065 dest, len);
5066 else
5067 gcc_unreachable ();
5068 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5069 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5070 return expand_call (fn, target, target == const0_rtx);
5073 /* Expand expression EXP, which is a call to the bzero builtin. Return
5074 NULL_RTX if we failed; the caller should emit a normal call. */
5076 static rtx
5077 expand_builtin_bzero (tree exp)
5079 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5080 return NULL_RTX;
5082 tree dest = CALL_EXPR_ARG (exp, 0);
5083 tree size = CALL_EXPR_ARG (exp, 1);
5085 check_memop_access (exp, dest, NULL_TREE, size);
5087 /* New argument list transforming bzero(ptr x, int y) to
5088 memset(ptr x, int 0, size_t y). This is done this way
5089 so that if it isn't expanded inline, we fall back to
5090 calling bzero instead of memset. */
5092 location_t loc = EXPR_LOCATION (exp);
5094 return expand_builtin_memset_args (dest, integer_zero_node,
5095 fold_convert_loc (loc,
5096 size_type_node, size),
5097 const0_rtx, VOIDmode, exp);
5100 /* Try to expand cmpstr operation ICODE with the given operands.
5101 Return the result rtx on success, otherwise return null. */
5103 static rtx
5104 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5105 HOST_WIDE_INT align)
5107 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5109 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5110 target = NULL_RTX;
5112 class expand_operand ops[4];
5113 create_output_operand (&ops[0], target, insn_mode);
5114 create_fixed_operand (&ops[1], arg1_rtx);
5115 create_fixed_operand (&ops[2], arg2_rtx);
5116 create_integer_operand (&ops[3], align);
5117 if (maybe_expand_insn (icode, 4, ops))
5118 return ops[0].value;
5119 return NULL_RTX;
5122 /* Expand expression EXP, which is a call to the memcmp built-in function.
5123 Return NULL_RTX if we failed and the caller should emit a normal call,
5124 otherwise try to get the result in TARGET, if convenient.
5125 RESULT_EQ is true if we can relax the returned value to be either zero
5126 or nonzero, without caring about the sign. */
5128 static rtx
5129 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5131 if (!validate_arglist (exp,
5132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5133 return NULL_RTX;
5135 tree arg1 = CALL_EXPR_ARG (exp, 0);
5136 tree arg2 = CALL_EXPR_ARG (exp, 1);
5137 tree len = CALL_EXPR_ARG (exp, 2);
5138 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5139 bool no_overflow = true;
5141 /* Diagnose calls where the specified length exceeds the size of either
5142 object. */
5143 tree size = compute_objsize (arg1, 0);
5144 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5145 len, /*maxread=*/NULL_TREE, size,
5146 /*objsize=*/NULL_TREE);
5147 if (no_overflow)
5149 size = compute_objsize (arg2, 0);
5150 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5151 len, /*maxread=*/NULL_TREE, size,
5152 /*objsize=*/NULL_TREE);
5155 /* If the specified length exceeds the size of either object,
5156 call the function. */
5157 if (!no_overflow)
5158 return NULL_RTX;
5160 /* Due to the performance benefit, always inline the calls first
5161 when result_eq is false. */
5162 rtx result = NULL_RTX;
5164 if (!result_eq && fcode != BUILT_IN_BCMP)
5166 result = inline_expand_builtin_string_cmp (exp, target);
5167 if (result)
5168 return result;
5171 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5172 location_t loc = EXPR_LOCATION (exp);
5174 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5175 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5177 /* If we don't have POINTER_TYPE, call the function. */
5178 if (arg1_align == 0 || arg2_align == 0)
5179 return NULL_RTX;
5181 rtx arg1_rtx = get_memory_rtx (arg1, len);
5182 rtx arg2_rtx = get_memory_rtx (arg2, len);
5183 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5185 /* Set MEM_SIZE as appropriate. */
5186 if (CONST_INT_P (len_rtx))
5188 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5189 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5192 by_pieces_constfn constfn = NULL;
5194 const char *src_str = c_getstr (arg2);
5195 if (result_eq && src_str == NULL)
5197 src_str = c_getstr (arg1);
5198 if (src_str != NULL)
5199 std::swap (arg1_rtx, arg2_rtx);
5202 /* If SRC is a string constant and block move would be done
5203 by pieces, we can avoid loading the string from memory
5204 and only store the computed constants. */
5205 if (src_str
5206 && CONST_INT_P (len_rtx)
5207 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
5208 constfn = builtin_memcpy_read_str;
5210 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5211 TREE_TYPE (len), target,
5212 result_eq, constfn,
5213 CONST_CAST (char *, src_str));
5215 if (result)
5217 /* Return the value in the proper mode for this function. */
5218 if (GET_MODE (result) == mode)
5219 return result;
5221 if (target != 0)
5223 convert_move (target, result, 0);
5224 return target;
5227 return convert_to_mode (mode, result, 0);
5230 return NULL_RTX;
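/* The RESULT_EQ relaxation applies to calls such as

     if (memcmp (a, b, n) == 0)

   where only equality is tested: the expansion may then return any
   nonzero value on a mismatch instead of an ordered result.  */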
5233 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5234 if we failed; the caller should emit a normal call, otherwise try to get
5235 the result in TARGET, if convenient. */
5237 static rtx
5238 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5240 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5241 return NULL_RTX;
5243 tree arg1 = CALL_EXPR_ARG (exp, 0);
5244 tree arg2 = CALL_EXPR_ARG (exp, 1);
5246 if (!check_nul_terminated_array (exp, arg1)
5247 || !check_nul_terminated_array (exp, arg2))
5248 return NULL_RTX;
5250 /* Due to the performance benefit, always inline the calls first. */
5251 rtx result = NULL_RTX;
5252 result = inline_expand_builtin_string_cmp (exp, target);
5253 if (result)
5254 return result;
5256 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5257 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5258 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5259 return NULL_RTX;
5261 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5262 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5264 /* If we don't have POINTER_TYPE, call the function. */
5265 if (arg1_align == 0 || arg2_align == 0)
5266 return NULL_RTX;
5268 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5269 arg1 = builtin_save_expr (arg1);
5270 arg2 = builtin_save_expr (arg2);
5272 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5273 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5275 /* Try to call cmpstrsi. */
5276 if (cmpstr_icode != CODE_FOR_nothing)
5277 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5278 MIN (arg1_align, arg2_align));
5280 /* Try to determine at least one length and call cmpstrnsi. */
5281 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5283 tree len;
5284 rtx arg3_rtx;
5286 tree len1 = c_strlen (arg1, 1);
5287 tree len2 = c_strlen (arg2, 1);
5289 if (len1)
5290 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5291 if (len2)
5292 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5294 /* If we don't have a constant length for the first, use the length
5295 of the second, if we know it. We don't require a constant for
5296 this case; some cost analysis could be done if both are available
5297 but neither is constant. For now, assume they're equally cheap,
5298 unless one has side effects. If both strings have constant lengths,
5299 use the smaller. */
5301 if (!len1)
5302 len = len2;
5303 else if (!len2)
5304 len = len1;
5305 else if (TREE_SIDE_EFFECTS (len1))
5306 len = len2;
5307 else if (TREE_SIDE_EFFECTS (len2))
5308 len = len1;
5309 else if (TREE_CODE (len1) != INTEGER_CST)
5310 len = len2;
5311 else if (TREE_CODE (len2) != INTEGER_CST)
5312 len = len1;
5313 else if (tree_int_cst_lt (len1, len2))
5314 len = len1;
5315 else
5316 len = len2;
5318 /* If both arguments have side effects, we cannot optimize. */
5319 if (len && !TREE_SIDE_EFFECTS (len))
5321 arg3_rtx = expand_normal (len);
5322 result = expand_cmpstrn_or_cmpmem
5323 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5324 arg3_rtx, MIN (arg1_align, arg2_align));
5328 tree fndecl = get_callee_fndecl (exp);
5329 if (result)
5331 /* Check to see if the argument was declared attribute nonstring
5332 and if so, issue a warning since at this point it's not known
5333 to be nul-terminated. */
5334 maybe_warn_nonstring_arg (fndecl, exp);
5336 /* Return the value in the proper mode for this function. */
5337 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5338 if (GET_MODE (result) == mode)
5339 return result;
5340 if (target == 0)
5341 return convert_to_mode (mode, result, 0);
5342 convert_move (target, result, 0);
5343 return target;
5346 /* Expand the library call ourselves using a stabilized argument
5347 list to avoid re-evaluating the function's arguments twice. */
5348 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5349 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5350 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5351 return expand_call (fn, target, target == const0_rtx);
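/* A worked example of the length selection above, with assumed values:
   for strcmp (buf, "abc"), c_strlen cannot measure BUF but gives
   len2 = 3 for the literal, so LEN = len2 + 1 = 4 and the cmpstrnsi
   pattern compares at most the four bytes "abc\0".  Bytes past the
   terminating NUL of the known string can never change the result.  */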
5354 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5355 NULL_RTX if we failed; the caller should then emit a normal call.
5356 Otherwise, try to get the result in TARGET, if convenient. */
5358 static rtx
5359 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5360 ATTRIBUTE_UNUSED machine_mode mode)
5362 if (!validate_arglist (exp,
5363 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5364 return NULL_RTX;
5366 tree arg1 = CALL_EXPR_ARG (exp, 0);
5367 tree arg2 = CALL_EXPR_ARG (exp, 1);
5368 tree arg3 = CALL_EXPR_ARG (exp, 2);
5370 if (!check_nul_terminated_array (exp, arg1, arg3)
5371 || !check_nul_terminated_array (exp, arg2, arg3))
5372 return NULL_RTX;
5374 /* Due to the performance benefit, always inline the calls first. */
5375 rtx result = NULL_RTX;
5376 result = inline_expand_builtin_string_cmp (exp, target);
5377 if (result)
5378 return result;
5380 /* If c_strlen can determine an expression for one of the string
5381 lengths, and it doesn't have side effects, then emit cmpstrnsi
5382 using length MIN(strlen(string)+1, arg3). */
5383 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5384 if (cmpstrn_icode == CODE_FOR_nothing)
5385 return NULL_RTX;
5387 tree len;
5389 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5390 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5392 tree len1 = c_strlen (arg1, 1);
5393 tree len2 = c_strlen (arg2, 1);
5395 location_t loc = EXPR_LOCATION (exp);
5397 if (len1)
5398 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5399 if (len2)
5400 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5402 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5404 /* If we don't have a constant length for the first, use the length
5405 of the second, if we know it. If neither string is constant length,
5406 use the given length argument. We don't require a constant for
5407 this case; some cost analysis could be done if both are available
5408 but neither is constant. For now, assume they're equally cheap,
5409 unless one has side effects. If both strings have constant lengths,
5410 use the smaller. */
5412 if (!len1 && !len2)
5413 len = len3;
5414 else if (!len1)
5415 len = len2;
5416 else if (!len2)
5417 len = len1;
5418 else if (TREE_SIDE_EFFECTS (len1))
5419 len = len2;
5420 else if (TREE_SIDE_EFFECTS (len2))
5421 len = len1;
5422 else if (TREE_CODE (len1) != INTEGER_CST)
5423 len = len2;
5424 else if (TREE_CODE (len2) != INTEGER_CST)
5425 len = len1;
5426 else if (tree_int_cst_lt (len1, len2))
5427 len = len1;
5428 else
5429 len = len2;
5431 /* If we are not using the given length, we must incorporate it here.
5432 The actual new length parameter will be MIN(len,arg3) in this case. */
5433 if (len != len3)
5435 len = fold_convert_loc (loc, sizetype, len);
5436 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5438 rtx arg1_rtx = get_memory_rtx (arg1, len);
5439 rtx arg2_rtx = get_memory_rtx (arg2, len);
5440 rtx arg3_rtx = expand_normal (len);
5441 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5442 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5443 MIN (arg1_align, arg2_align));
5445 tree fndecl = get_callee_fndecl (exp);
5446 if (result)
5448 /* Check to see if the argument was declared attribute nonstring
5449 and if so, issue a warning since at this point it's not known
5450 to be nul-terminated. */
5451 maybe_warn_nonstring_arg (fndecl, exp);
5453 /* Return the value in the proper mode for this function. */
5454 mode = TYPE_MODE (TREE_TYPE (exp));
5455 if (GET_MODE (result) == mode)
5456 return result;
5457 if (target == 0)
5458 return convert_to_mode (mode, result, 0);
5459 convert_move (target, result, 0);
5460 return target;
5463 /* Expand the library call ourselves using a stabilized argument
5464 list to avoid re-evaluating the function's arguments twice. */
5465 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5466 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5467 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5468 return expand_call (fn, target, target == const0_rtx);
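/* A worked example of the final length above, with illustrative
   operands: for strncmp (s, "hello", 100), len2 = 5 + 1 = 6 and
   len3 = 100, so the emitted comparison covers MIN (6, 100) = 6 bytes;
   anything past the NUL of the known string cannot affect the result.  */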
5471 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5472 if that's convenient. */
5475 expand_builtin_saveregs (void)
5477 rtx val;
5478 rtx_insn *seq;
5480 /* Don't do __builtin_saveregs more than once in a function.
5481 Save the result of the first call and reuse it. */
5482 if (saveregs_value != 0)
5483 return saveregs_value;
5485 /* When this function is called, it means that registers must be
5486 saved on entry to this function. So we migrate the call to the
5487 first insn of this function. */
5489 start_sequence ();
5491 /* Do whatever the machine needs done in this case. */
5492 val = targetm.calls.expand_builtin_saveregs ();
5494 seq = get_insns ();
5495 end_sequence ();
5497 saveregs_value = val;
5499 /* Put the insns after the NOTE that starts the function. If this
5500 is inside a start_sequence, make the outer-level insn chain current, so
5501 the code is placed at the start of the function. */
5502 push_topmost_sequence ();
5503 emit_insn_after (seq, entry_of_function ());
5504 pop_topmost_sequence ();
5506 return val;
5509 /* Expand a call to __builtin_next_arg. */
5511 static rtx
5512 expand_builtin_next_arg (void)
5514 /* Argument checking is already done in fold_builtin_next_arg,
5515 which must be called before this function. */
5516 return expand_binop (ptr_mode, add_optab,
5517 crtl->args.internal_arg_pointer,
5518 crtl->args.arg_offset_rtx,
5519 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5522 /* Make it easier for the backends by protecting the valist argument
5523 from multiple evaluations. */
5525 static tree
5526 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5528 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5530 /* The current way of determining the type of valist is completely
5531 bogus. We should have the information on the va builtin instead. */
5532 if (!vatype)
5533 vatype = targetm.fn_abi_va_list (cfun->decl);
5535 if (TREE_CODE (vatype) == ARRAY_TYPE)
5537 if (TREE_SIDE_EFFECTS (valist))
5538 valist = save_expr (valist);
5540 /* For this case, the backends will be expecting a pointer to
5541 vatype, but it's possible we've actually been given an array
5542 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5543 So fix it. */
5544 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5546 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5547 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5550 else
5552 tree pt = build_pointer_type (vatype);
5554 if (! needs_lvalue)
5556 if (! TREE_SIDE_EFFECTS (valist))
5557 return valist;
5559 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5560 TREE_SIDE_EFFECTS (valist) = 1;
5563 if (TREE_SIDE_EFFECTS (valist))
5564 valist = save_expr (valist);
5565 valist = fold_build2_loc (loc, MEM_REF,
5566 vatype, valist, build_int_cst (pt, 0));
5569 return valist;
5572 /* The "standard" definition of va_list is void*. */
5574 tree
5575 std_build_builtin_va_list (void)
5577 return ptr_type_node;
5580 /* The "standard" abi va_list is va_list_type_node. */
5582 tree
5583 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5585 return va_list_type_node;
5588 /* The "standard" type of va_list is va_list_type_node. */
5590 tree
5591 std_canonical_va_list_type (tree type)
5593 tree wtype, htype;
5595 wtype = va_list_type_node;
5596 htype = type;
5598 if (TREE_CODE (wtype) == ARRAY_TYPE)
5600 /* If va_list is an array type, the argument may have decayed
5601 to a pointer type, e.g. by being passed to another function.
5602 In that case, unwrap both types so that we can compare the
5603 underlying records. */
5604 if (TREE_CODE (htype) == ARRAY_TYPE
5605 || POINTER_TYPE_P (htype))
5607 wtype = TREE_TYPE (wtype);
5608 htype = TREE_TYPE (htype);
5611 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5612 return va_list_type_node;
5614 return NULL_TREE;
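/* A sketch of the decay case unwrapped above, assuming a target whose
   va_list is an array of a record (x86_64, for instance):

     void f (va_list ap);               ... AP arrives as pointer-to-record
     void g (va_list ap) { f (ap); }    ... hypothetical callers

   Inside f the argument has POINTER_TYPE, so both WTYPE and HTYPE must
   be stripped one level before their main variants can match.  */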
5617 /* The "standard" implementation of va_start: just assign `nextarg' to
5618 the variable. */
5620 void
5621 std_expand_builtin_va_start (tree valist, rtx nextarg)
5623 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5624 convert_move (va_r, nextarg, 0);
5627 /* Expand EXP, a call to __builtin_va_start. */
5629 static rtx
5630 expand_builtin_va_start (tree exp)
5632 rtx nextarg;
5633 tree valist;
5634 location_t loc = EXPR_LOCATION (exp);
5636 if (call_expr_nargs (exp) < 2)
5638 error_at (loc, "too few arguments to function %<va_start%>");
5639 return const0_rtx;
5642 if (fold_builtin_next_arg (exp, true))
5643 return const0_rtx;
5645 nextarg = expand_builtin_next_arg ();
5646 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5648 if (targetm.expand_builtin_va_start)
5649 targetm.expand_builtin_va_start (valist, nextarg);
5650 else
5651 std_expand_builtin_va_start (valist, nextarg);
5653 return const0_rtx;
5656 /* Expand EXP, a call to __builtin_va_end. */
5658 static rtx
5659 expand_builtin_va_end (tree exp)
5661 tree valist = CALL_EXPR_ARG (exp, 0);
5663 /* Evaluate for side effects, if needed. I hate macros that don't
5664 do that. */
5665 if (TREE_SIDE_EFFECTS (valist))
5666 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5668 return const0_rtx;
5671 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5672 builtin rather than just as an assignment in stdarg.h because of the
5673 nastiness of array-type va_list types. */
5675 static rtx
5676 expand_builtin_va_copy (tree exp)
5678 tree dst, src, t;
5679 location_t loc = EXPR_LOCATION (exp);
5681 dst = CALL_EXPR_ARG (exp, 0);
5682 src = CALL_EXPR_ARG (exp, 1);
5684 dst = stabilize_va_list_loc (loc, dst, 1);
5685 src = stabilize_va_list_loc (loc, src, 0);
5687 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5689 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5691 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5692 TREE_SIDE_EFFECTS (t) = 1;
5693 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5695 else
5697 rtx dstb, srcb, size;
5699 /* Evaluate to pointers. */
5700 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5701 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5702 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5703 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5705 dstb = convert_memory_address (Pmode, dstb);
5706 srcb = convert_memory_address (Pmode, srcb);
5708 /* "Dereference" to BLKmode memories. */
5709 dstb = gen_rtx_MEM (BLKmode, dstb);
5710 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5711 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5712 srcb = gen_rtx_MEM (BLKmode, srcb);
5713 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5714 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5716 /* Copy. */
5717 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5720 return const0_rtx;
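/* The two shapes handled above, under assumed ABIs: where va_list is a
   plain pointer, va_copy (d, s) degenerates to the scalar assignment
   d = s; where it is an array of a record (x86_64, for instance), the
   whole record is block-copied, roughly as if by
   memcpy (&d[0], &s[0], sizeof (va_list)).  */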
5723 /* Expand a call to one of the builtin functions __builtin_frame_address or
5724 __builtin_return_address. */
5726 static rtx
5727 expand_builtin_frame_address (tree fndecl, tree exp)
5729 /* The argument must be a nonnegative integer constant.
5730 It counts the number of frames to scan up the stack.
5731 The value is either the frame pointer value or the return
5732 address saved in that frame. */
5733 if (call_expr_nargs (exp) == 0)
5734 /* Warning about missing arg was already issued. */
5735 return const0_rtx;
5736 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5738 error ("invalid argument to %qD", fndecl);
5739 return const0_rtx;
5741 else
5743 /* Number of frames to scan up the stack. */
5744 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5746 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5748 /* Some ports cannot access arbitrary stack frames. */
5749 if (tem == NULL)
5751 warning (0, "unsupported argument to %qD", fndecl);
5752 return const0_rtx;
5755 if (count)
5757 /* Warn since no effort is made to ensure that any frame
5758 beyond the current one exists or can be safely reached. */
5759 warning (OPT_Wframe_address, "calling %qD with "
5760 "a nonzero argument is unsafe", fndecl);
5763 /* For __builtin_frame_address, return what we've got. */
5764 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5765 return tem;
5767 if (!REG_P (tem)
5768 && ! CONSTANT_P (tem))
5769 tem = copy_addr_to_reg (tem);
5770 return tem;
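/* A usage sketch (hypothetical calls):

     void *fp = __builtin_frame_address (0);    ... COUNT == 0, no warning
     void *ra = __builtin_return_address (2);   ... nonzero COUNT warns
                                                    with -Wframe-address

   since nothing guarantees that two frames up the stack exist or can be
   reached safely on the target.  */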
5774 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5775 failed and the caller should emit a normal call. */
5777 static rtx
5778 expand_builtin_alloca (tree exp)
5780 rtx op0;
5781 rtx result;
5782 unsigned int align;
5783 tree fndecl = get_callee_fndecl (exp);
5784 HOST_WIDE_INT max_size;
5785 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5786 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5787 bool valid_arglist
5788 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5789 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5790 VOID_TYPE)
5791 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5792 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5793 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5795 if (!valid_arglist)
5796 return NULL_RTX;
5798 if ((alloca_for_var
5799 && warn_vla_limit >= HOST_WIDE_INT_MAX
5800 && warn_alloc_size_limit < warn_vla_limit)
5801 || (!alloca_for_var
5802 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5803 && warn_alloc_size_limit < warn_alloca_limit
5806 /* -Walloca-larger-than and -Wvla-larger-than settings of
5807 less than HOST_WIDE_INT_MAX override the more general
5808 -Walloc-size-larger-than, so unless either of the former
5809 options is smaller than the last one (which would imply
5810 that the call was already checked), check the alloca
5811 arguments for overflow. */
5812 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5813 int idx[] = { 0, -1 };
5814 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5817 /* Compute the argument. */
5818 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5820 /* Compute the alignment. */
5821 align = (fcode == BUILT_IN_ALLOCA
5822 ? BIGGEST_ALIGNMENT
5823 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5825 /* Compute the maximum size. */
5826 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5827 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5828 : -1);
5830 /* Allocate the desired space. If the allocation stems from the declaration
5831 of a variable-sized object, it cannot accumulate. */
5832 result
5833 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5834 result = convert_memory_address (ptr_mode, result);
5836 /* Dynamic allocations for variables are recorded during gimplification. */
5837 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5838 record_dynamic_alloc (exp);
5840 return result;
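/* The three argument shapes validated above, with illustrative constant
   operands:

     __builtin_alloca (n)
     __builtin_alloca_with_align (n, 64)
     __builtin_alloca_with_align_and_max (n, 64, 4096)

   The alignment operand is in bits; only the plain form defaults to
   BIGGEST_ALIGNMENT, and only the last form imposes a maximum size.  */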
5843 /* Emit a call to __asan_allocas_unpoison for EXP. Add to its second
5844 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5845 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
5846 on the handle_builtin_stack_restore function. */
5848 static rtx
5849 expand_asan_emit_allocas_unpoison (tree exp)
5851 tree arg0 = CALL_EXPR_ARG (exp, 0);
5852 tree arg1 = CALL_EXPR_ARG (exp, 1);
5853 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5854 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5855 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5856 stack_pointer_rtx, NULL_RTX, 0,
5857 OPTAB_LIB_WIDEN);
5858 off = convert_modes (ptr_mode, Pmode, off, 0);
5859 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5860 OPTAB_LIB_WIDEN);
5861 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5862 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5863 top, ptr_mode, bot, ptr_mode);
5864 return ret;
5867 /* Expand a call to bswap builtin in EXP.
5868 Return NULL_RTX if a normal call should be emitted rather than expanding the
5869 function in-line. If convenient, the result should be placed in TARGET.
5870 SUBTARGET may be used as the target for computing one of EXP's operands. */
5872 static rtx
5873 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5874 rtx subtarget)
5876 tree arg;
5877 rtx op0;
5879 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5880 return NULL_RTX;
5882 arg = CALL_EXPR_ARG (exp, 0);
5883 op0 = expand_expr (arg,
5884 subtarget && GET_MODE (subtarget) == target_mode
5885 ? subtarget : NULL_RTX,
5886 target_mode, EXPAND_NORMAL);
5887 if (GET_MODE (op0) != target_mode)
5888 op0 = convert_to_mode (target_mode, op0, 1);
5890 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5892 gcc_assert (target);
5894 return convert_to_mode (target_mode, target, 1);
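/* A worked example of the expansion above, assuming a 32-bit operand:
   __builtin_bswap32 (0x12345678) yields 0x78563412 via the target's
   bswap pattern where one exists; otherwise expand_unop attempts a
   wider mode or a synthesized equivalent.  */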
5897 /* Expand a call to a unary builtin in EXP.
5898 Return NULL_RTX if a normal call should be emitted rather than expanding the
5899 function in-line. If convenient, the result should be placed in TARGET.
5900 SUBTARGET may be used as the target for computing one of EXP's operands. */
5902 static rtx
5903 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5904 rtx subtarget, optab op_optab)
5906 rtx op0;
5908 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5909 return NULL_RTX;
5911 /* Compute the argument. */
5912 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5913 (subtarget
5914 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5915 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5916 VOIDmode, EXPAND_NORMAL);
5917 /* Compute op, into TARGET if possible.
5918 Set TARGET to wherever the result comes back. */
5919 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5920 op_optab, op0, target, op_optab != clrsb_optab);
5921 gcc_assert (target);
5923 return convert_to_mode (target_mode, target, 0);
5926 /* Expand a call to __builtin_expect. We just return our argument
5927 as the builtin_expect semantics should have already been handled by
5928 the tree branch prediction pass. */
5930 static rtx
5931 expand_builtin_expect (tree exp, rtx target)
5933 tree arg;
5935 if (call_expr_nargs (exp) < 2)
5936 return const0_rtx;
5937 arg = CALL_EXPR_ARG (exp, 0);
5939 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5940 /* When guessing was done, the hints should be already stripped away. */
5941 gcc_assert (!flag_guess_branch_prob
5942 || optimize == 0 || seen_error ());
5943 return target;
5946 /* Expand a call to __builtin_expect_with_probability. We just return our
5947 argument as the builtin_expect semantics should have already been
5948 handled by the tree branch prediction pass. */
5950 static rtx
5951 expand_builtin_expect_with_probability (tree exp, rtx target)
5953 tree arg;
5955 if (call_expr_nargs (exp) < 3)
5956 return const0_rtx;
5957 arg = CALL_EXPR_ARG (exp, 0);
5959 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5960 /* When guessing was done, the hints should be already stripped away. */
5961 gcc_assert (!flag_guess_branch_prob
5962 || optimize == 0 || seen_error ());
5963 return target;
5967 /* Expand a call to __builtin_assume_aligned. We just return our first
5968 argument, as the builtin_assume_aligned semantics should have already
5969 been handled by CCP. */
5971 static rtx
5972 expand_builtin_assume_aligned (tree exp, rtx target)
5974 if (call_expr_nargs (exp) < 2)
5975 return const0_rtx;
5976 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5977 EXPAND_NORMAL);
5978 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5979 && (call_expr_nargs (exp) < 3
5980 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5981 return target;
5984 void
5985 expand_builtin_trap (void)
5987 if (targetm.have_trap ())
5989 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5990 /* For trap insns, when not accumulating outgoing args, force a
5991 REG_ARGS_SIZE note to prevent crossjumping of calls with
5992 different args sizes. */
5993 if (!ACCUMULATE_OUTGOING_ARGS)
5994 add_args_size_note (insn, stack_pointer_delta);
5996 else
5998 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5999 tree call_expr = build_call_expr (fn, 0);
6000 expand_call (call_expr, NULL_RTX, false);
6003 emit_barrier ();
6006 /* Expand a call to __builtin_unreachable. We do nothing except emit
6007 a barrier saying that control flow will not pass here.
6009 It is the responsibility of the program being compiled to ensure
6010 that control flow never reaches __builtin_unreachable. */
6011 static void
6012 expand_builtin_unreachable (void)
6014 emit_barrier ();
6017 /* Expand EXP, a call to fabs, fabsf or fabsl.
6018 Return NULL_RTX if a normal call should be emitted rather than expanding
6019 the function inline. If convenient, the result should be placed
6020 in TARGET. SUBTARGET may be used as the target for computing
6021 the operand. */
6023 static rtx
6024 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6026 machine_mode mode;
6027 tree arg;
6028 rtx op0;
6030 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6031 return NULL_RTX;
6033 arg = CALL_EXPR_ARG (exp, 0);
6034 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6035 mode = TYPE_MODE (TREE_TYPE (arg));
6036 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6037 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6040 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6041 Return NULL_RTX if a normal call should be emitted rather than expanding the
6042 function inline. If convenient, the result should be placed in TARGET.
6043 SUBTARGET may be used as the target for computing the operand. */
6045 static rtx
6046 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6048 rtx op0, op1;
6049 tree arg;
6051 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6052 return NULL_RTX;
6054 arg = CALL_EXPR_ARG (exp, 0);
6055 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6057 arg = CALL_EXPR_ARG (exp, 1);
6058 op1 = expand_normal (arg);
6060 return expand_copysign (op0, op1, target);
6063 /* Expand a call to __builtin___clear_cache. */
6065 static rtx
6066 expand_builtin___clear_cache (tree exp)
6068 if (!targetm.code_for_clear_cache)
6070 #ifdef CLEAR_INSN_CACHE
6071 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6072 does something. Just do the default expansion to a call to
6073 __clear_cache(). */
6074 return NULL_RTX;
6075 #else
6076 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6077 does nothing. There is no need to call it. Do nothing. */
6078 return const0_rtx;
6079 #endif /* CLEAR_INSN_CACHE */
6082 /* We have a "clear_cache" insn, and it will handle everything. */
6083 tree begin, end;
6084 rtx begin_rtx, end_rtx;
6086 /* We must not expand to a library call. If we did, any
6087 fallback library function in libgcc that might contain a call to
6088 __builtin___clear_cache() would recurse infinitely. */
6089 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6091 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6092 return const0_rtx;
6095 if (targetm.have_clear_cache ())
6097 class expand_operand ops[2];
6099 begin = CALL_EXPR_ARG (exp, 0);
6100 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6102 end = CALL_EXPR_ARG (exp, 1);
6103 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6105 create_address_operand (&ops[0], begin_rtx);
6106 create_address_operand (&ops[1], end_rtx);
6107 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6108 return const0_rtx;
6110 return const0_rtx;
6113 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6115 static rtx
6116 round_trampoline_addr (rtx tramp)
6118 rtx temp, addend, mask;
6120 /* If we don't need too much alignment, we'll have been guaranteed
6121 proper alignment by get_trampoline_type. */
6122 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6123 return tramp;
6125 /* Round address up to desired boundary. */
6126 temp = gen_reg_rtx (Pmode);
6127 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6128 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6130 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6131 temp, 0, OPTAB_LIB_WIDEN);
6132 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6133 temp, 0, OPTAB_LIB_WIDEN);
6135 return tramp;
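/* A worked example of the rounding above, assuming
   TRAMPOLINE_ALIGNMENT == 128 and BITS_PER_UNIT == 8: ADDEND is 15 and
   MASK is -16, so the net effect is tramp = (tramp + 15) & -16, e.g.
   0x1001 rounds up to 0x1010.  */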
6138 static rtx
6139 expand_builtin_init_trampoline (tree exp, bool onstack)
6141 tree t_tramp, t_func, t_chain;
6142 rtx m_tramp, r_tramp, r_chain, tmp;
6144 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6145 POINTER_TYPE, VOID_TYPE))
6146 return NULL_RTX;
6148 t_tramp = CALL_EXPR_ARG (exp, 0);
6149 t_func = CALL_EXPR_ARG (exp, 1);
6150 t_chain = CALL_EXPR_ARG (exp, 2);
6152 r_tramp = expand_normal (t_tramp);
6153 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6154 MEM_NOTRAP_P (m_tramp) = 1;
6156 /* If ONSTACK, the TRAMP argument should be the address of a field
6157 within the local function's FRAME decl. Either way, let's see if
6158 we can fill in the MEM_ATTRs for this memory. */
6159 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6160 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6162 /* Creator of a heap trampoline is responsible for making sure the
6163 address is aligned to at least STACK_BOUNDARY. Normally malloc
6164 will ensure this anyhow. */
6165 tmp = round_trampoline_addr (r_tramp);
6166 if (tmp != r_tramp)
6168 m_tramp = change_address (m_tramp, BLKmode, tmp);
6169 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6170 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6173 /* The FUNC argument should be the address of the nested function.
6174 Extract the actual function decl to pass to the hook. */
6175 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6176 t_func = TREE_OPERAND (t_func, 0);
6177 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6179 r_chain = expand_normal (t_chain);
6181 /* Generate insns to initialize the trampoline. */
6182 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6184 if (onstack)
6186 trampolines_created = 1;
6188 if (targetm.calls.custom_function_descriptors != 0)
6189 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6190 "trampoline generated for nested function %qD", t_func);
6193 return const0_rtx;
6196 static rtx
6197 expand_builtin_adjust_trampoline (tree exp)
6199 rtx tramp;
6201 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6202 return NULL_RTX;
6204 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6205 tramp = round_trampoline_addr (tramp);
6206 if (targetm.calls.trampoline_adjust_address)
6207 tramp = targetm.calls.trampoline_adjust_address (tramp);
6209 return tramp;
6212 /* Expand a call to the builtin descriptor initialization routine.
6213 A descriptor is made up of a couple of pointers to the static
6214 chain and the code entry in this order. */
6216 static rtx
6217 expand_builtin_init_descriptor (tree exp)
6219 tree t_descr, t_func, t_chain;
6220 rtx m_descr, r_descr, r_func, r_chain;
6222 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6223 VOID_TYPE))
6224 return NULL_RTX;
6226 t_descr = CALL_EXPR_ARG (exp, 0);
6227 t_func = CALL_EXPR_ARG (exp, 1);
6228 t_chain = CALL_EXPR_ARG (exp, 2);
6230 r_descr = expand_normal (t_descr);
6231 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6232 MEM_NOTRAP_P (m_descr) = 1;
6233 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6235 r_func = expand_normal (t_func);
6236 r_chain = expand_normal (t_chain);
6238 /* Generate insns to initialize the descriptor. */
6239 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6240 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6241 POINTER_SIZE / BITS_PER_UNIT), r_func);
6243 return const0_rtx;
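/* The layout produced above, assuming 64-bit pointers for illustration:
   the static chain value lands at DESCR + 0 and the code address of the
   nested function at DESCR + 8, matching the chain-then-entry order
   documented for descriptors.  */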
6246 /* Expand a call to the builtin descriptor adjustment routine. */
6248 static rtx
6249 expand_builtin_adjust_descriptor (tree exp)
6251 rtx tramp;
6253 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6254 return NULL_RTX;
6256 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6258 /* Unalign the descriptor to allow runtime identification. */
6259 tramp = plus_constant (ptr_mode, tramp,
6260 targetm.calls.custom_function_descriptors);
6262 return force_operand (tramp, NULL_RTX);
6265 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6266 function. The function first checks whether the back end provides
6267 an insn to implement signbit for the respective mode. If not, it
6268 checks whether the floating point format of the value is such that
6269 the sign bit can be extracted. If that is not the case, error out.
6270 EXP is the expression that is a call to the builtin function; if
6271 convenient, the result should be placed in TARGET. */
6272 static rtx
6273 expand_builtin_signbit (tree exp, rtx target)
6275 const struct real_format *fmt;
6276 scalar_float_mode fmode;
6277 scalar_int_mode rmode, imode;
6278 tree arg;
6279 int word, bitpos;
6280 enum insn_code icode;
6281 rtx temp;
6282 location_t loc = EXPR_LOCATION (exp);
6284 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6285 return NULL_RTX;
6287 arg = CALL_EXPR_ARG (exp, 0);
6288 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6289 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6290 fmt = REAL_MODE_FORMAT (fmode);
6292 arg = builtin_save_expr (arg);
6294 /* Expand the argument, yielding an RTX expression. */
6295 temp = expand_normal (arg);
6297 /* Check if the back end provides an insn that handles signbit for the
6298 argument's mode. */
6299 icode = optab_handler (signbit_optab, fmode);
6300 if (icode != CODE_FOR_nothing)
6302 rtx_insn *last = get_last_insn ();
6303 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6304 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6305 return target;
6306 delete_insns_since (last);
6309 /* For floating point formats without a sign bit, implement signbit
6310 as "ARG < 0.0". */
6311 bitpos = fmt->signbit_ro;
6312 if (bitpos < 0)
6314 /* But we can't do this if the format supports signed zero. */
6315 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6317 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6318 build_real (TREE_TYPE (arg), dconst0));
6319 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6322 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6324 imode = int_mode_for_mode (fmode).require ();
6325 temp = gen_lowpart (imode, temp);
6327 else
6329 imode = word_mode;
6330 /* Handle targets with different FP word orders. */
6331 if (FLOAT_WORDS_BIG_ENDIAN)
6332 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6333 else
6334 word = bitpos / BITS_PER_WORD;
6335 temp = operand_subword_force (temp, word, fmode);
6336 bitpos = bitpos % BITS_PER_WORD;
6339 /* Force the intermediate word_mode (or narrower) result into a
6340 register. This avoids attempting to create paradoxical SUBREGs
6341 of floating point modes below. */
6342 temp = force_reg (imode, temp);
6344 /* If the bitpos is within the "result mode" lowpart, the operation
6345 can be implemented with a single bitwise AND. Otherwise, we need
6346 a right shift and an AND. */
6348 if (bitpos < GET_MODE_BITSIZE (rmode))
6350 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6352 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6353 temp = gen_lowpart (rmode, temp);
6354 temp = expand_binop (rmode, and_optab, temp,
6355 immed_wide_int_const (mask, rmode),
6356 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6358 else
6360 /* Perform a logical right shift to place the signbit in the least
6361 significant bit, then truncate the result to the desired mode
6362 and mask just this bit. */
6363 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6364 temp = gen_lowpart (rmode, temp);
6365 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6366 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6369 return temp;
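/* A worked example of the two tails above, assuming IEEE binary32 with
   BITPOS == 31 and a 32-bit result mode: since 31 < 32, a single AND
   with the mask 1 << 31 suffices, because callers only test the result
   against zero.  Were BITPOS to exceed the result width, the value
   would instead be shifted right by BITPOS and then ANDed with 1.  */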
6372 /* Expand fork or exec calls. TARGET is the desired target of the
6373 call. EXP is the call. FN is the
6374 identifier of the actual function. IGNORE is nonzero if the
6375 value is to be ignored. */
6377 static rtx
6378 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6380 tree id, decl;
6381 tree call;
6383 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6385 /* Detect unterminated path. */
6386 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6387 return NULL_RTX;
6389 /* Also detect unterminated first argument. */
6390 switch (DECL_FUNCTION_CODE (fn))
6392 case BUILT_IN_EXECL:
6393 case BUILT_IN_EXECLE:
6394 case BUILT_IN_EXECLP:
6395 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1)))
6396 return NULL_RTX;
6397 default:
6398 break;
6403 /* If we are not profiling, just call the function. */
6404 if (!profile_arc_flag)
6405 return NULL_RTX;
6407 /* Otherwise call the wrapper. This should be equivalent for the rest of
6408 the compiler, so the code does not diverge, and the wrapper may run the
6409 code necessary for keeping the profiling sane. */
6411 switch (DECL_FUNCTION_CODE (fn))
6413 case BUILT_IN_FORK:
6414 id = get_identifier ("__gcov_fork");
6415 break;
6417 case BUILT_IN_EXECL:
6418 id = get_identifier ("__gcov_execl");
6419 break;
6421 case BUILT_IN_EXECV:
6422 id = get_identifier ("__gcov_execv");
6423 break;
6425 case BUILT_IN_EXECLP:
6426 id = get_identifier ("__gcov_execlp");
6427 break;
6429 case BUILT_IN_EXECLE:
6430 id = get_identifier ("__gcov_execle");
6431 break;
6433 case BUILT_IN_EXECVP:
6434 id = get_identifier ("__gcov_execvp");
6435 break;
6437 case BUILT_IN_EXECVE:
6438 id = get_identifier ("__gcov_execve");
6439 break;
6441 default:
6442 gcc_unreachable ();
6445 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6446 FUNCTION_DECL, id, TREE_TYPE (fn));
6447 DECL_EXTERNAL (decl) = 1;
6448 TREE_PUBLIC (decl) = 1;
6449 DECL_ARTIFICIAL (decl) = 1;
6450 TREE_NOTHROW (decl) = 1;
6451 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6452 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6453 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6454 return expand_call (call, target, ignore);
6459 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6460 the pointer in these functions is void*, the tree optimizers may remove
6461 casts. The mode computed in expand_builtin isn't reliable either, due
6462 to __sync_bool_compare_and_swap.
6464 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6465 group of builtins. This gives us log2 of the mode size. */
6467 static inline machine_mode
6468 get_builtin_sync_mode (int fcode_diff)
6470 /* The size is not negotiable, so ask not to get BLKmode in return
6471 if the target indicates that a smaller size would be better. */
6472 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
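/* An example of the mapping above: BUILT_IN_SYNC_FETCH_AND_ADD_4 minus
   its FOO_1 base gives FCODE_DIFF == 2, so the requested mode is
   8 << 2 == 32 bits wide (SImode on typical targets).  */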
6475 /* Expand the memory expression LOC and return the appropriate memory operand
6476 for the builtin_sync operations. */
6478 static rtx
6479 get_builtin_sync_mem (tree loc, machine_mode mode)
6481 rtx addr, mem;
6482 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6483 ? TREE_TYPE (TREE_TYPE (loc))
6484 : TREE_TYPE (loc));
6485 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6487 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6488 addr = convert_memory_address (addr_mode, addr);
6490 /* Note that we explicitly do not want any alias information for this
6491 memory, so that we kill all other live memories. Otherwise we don't
6492 satisfy the full barrier semantics of the intrinsic. */
6493 mem = gen_rtx_MEM (mode, addr);
6495 set_mem_addr_space (mem, addr_space);
6497 mem = validize_mem (mem);
6499 /* The alignment must be at least that of the mode. */
6500 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6501 get_pointer_alignment (loc)));
6502 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6503 MEM_VOLATILE_P (mem) = 1;
6505 return mem;
6508 /* Make sure an argument is in the right mode.
6509 EXP is the tree argument.
6510 MODE is the mode it should be in. */
6512 static rtx
6513 expand_expr_force_mode (tree exp, machine_mode mode)
6515 rtx val;
6516 machine_mode old_mode;
6518 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6519 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6520 of CONST_INTs, where we know the old_mode only from the call argument. */
6522 old_mode = GET_MODE (val);
6523 if (old_mode == VOIDmode)
6524 old_mode = TYPE_MODE (TREE_TYPE (exp));
6525 val = convert_modes (mode, old_mode, val, 1);
6526 return val;
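/* An example of the CONST_INT care above: expanding the literal 1
   yields (const_int 1) with VOIDmode, so OLD_MODE must be recovered
   from the call argument's type before convert_modes converts it.  */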
6530 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6531 EXP is the CALL_EXPR. CODE is the rtx code
6532 that corresponds to the arithmetic or logical operation from the name;
6533 an exception here is that NOT actually means NAND. TARGET is an optional
6534 place for us to store the results; AFTER is true if this is the
6535 fetch_and_xxx form. */
6537 static rtx
6538 expand_builtin_sync_operation (machine_mode mode, tree exp,
6539 enum rtx_code code, bool after,
6540 rtx target)
6542 rtx val, mem;
6543 location_t loc = EXPR_LOCATION (exp);
6545 if (code == NOT && warn_sync_nand)
6547 tree fndecl = get_callee_fndecl (exp);
6548 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6550 static bool warned_f_a_n, warned_n_a_f;
6552 switch (fcode)
6554 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6555 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6556 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6557 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6558 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6559 if (warned_f_a_n)
6560 break;
6562 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6563 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6564 warned_f_a_n = true;
6565 break;
6567 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6568 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6569 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6570 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6572 if (warned_n_a_f)
6573 break;
6575 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6576 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6577 warned_n_a_f = true;
6578 break;
6580 default:
6581 gcc_unreachable ();
6585 /* Expand the operands. */
6586 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6587 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6589 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6590 after);
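/* An example of the GCC 4.4 semantic change warned about above:

     __sync_nand_and_fetch (p, v)

   now computes *p = ~(*p & v), a true NAND, whereas releases before
   4.4 implemented it as *p = ~*p & v.  */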
6593 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6594 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6595 true if this is the boolean form. TARGET is a place for us to store the
6596 results; this is NOT optional if IS_BOOL is true. */
6598 static rtx
6599 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6600 bool is_bool, rtx target)
6602 rtx old_val, new_val, mem;
6603 rtx *pbool, *poval;
6605 /* Expand the operands. */
6606 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6607 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6608 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6610 pbool = poval = NULL;
6611 if (target != const0_rtx)
6613 if (is_bool)
6614 pbool = &target;
6615 else
6616 poval = &target;
6618 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6619 false, MEMMODEL_SYNC_SEQ_CST,
6620 MEMMODEL_SYNC_SEQ_CST))
6621 return NULL_RTX;
6623 return target;
6626 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6627 general form is actually an atomic exchange, and some targets only
6628 support a reduced form with the second argument being a constant 1.
6629 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6630 the results. */
6632 static rtx
6633 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6634 rtx target)
6636 rtx val, mem;
6638 /* Expand the operands. */
6639 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6640 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6642 return expand_sync_lock_test_and_set (target, mem, val);
6645 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6647 static void
6648 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6650 rtx mem;
6652 /* Expand the operands. */
6653 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6655 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6658 /* Given an integer representing an ``enum memmodel'', verify its
6659 correctness and return the memory model enum. */
6661 static enum memmodel
6662 get_memmodel (tree exp)
6664 rtx op;
6665 unsigned HOST_WIDE_INT val;
6666 location_t loc
6667 = expansion_point_location_if_in_system_header (input_location);
6669 /* If the parameter is not a constant, it's a run time value so we'll just
6670 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6671 if (TREE_CODE (exp) != INTEGER_CST)
6672 return MEMMODEL_SEQ_CST;
6674 op = expand_normal (exp);
6676 val = INTVAL (op);
6677 if (targetm.memmodel_check)
6678 val = targetm.memmodel_check (val);
6679 else if (val & ~MEMMODEL_MASK)
6681 warning_at (loc, OPT_Winvalid_memory_model,
6682 "unknown architecture specifier in memory model to builtin");
6683 return MEMMODEL_SEQ_CST;
6686 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6687 if (memmodel_base (val) >= MEMMODEL_LAST)
6689 warning_at (loc, OPT_Winvalid_memory_model,
6690 "invalid memory model argument to builtin");
6691 return MEMMODEL_SEQ_CST;
6694 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6695 be conservative and promote consume to acquire. */
6696 if (val == MEMMODEL_CONSUME)
6697 val = MEMMODEL_ACQUIRE;
6699 return (enum memmodel) val;
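/* Examples of the promotions above: __atomic_load_n (p, __ATOMIC_CONSUME)
   is treated as acquire (the PR 59448 workaround), and a model passed as
   a run-time variable is handled as __ATOMIC_SEQ_CST.  */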
6702 /* Expand the __atomic_exchange intrinsic:
6703 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6704 EXP is the CALL_EXPR.
6705 TARGET is an optional place for us to store the results. */
6707 static rtx
6708 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6710 rtx val, mem;
6711 enum memmodel model;
6713 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6715 if (!flag_inline_atomics)
6716 return NULL_RTX;
6718 /* Expand the operands. */
6719 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6720 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6722 return expand_atomic_exchange (target, mem, val, model);
6725 /* Expand the __atomic_compare_exchange intrinsic:
6726 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6727 TYPE desired, BOOL weak,
6728 enum memmodel success,
6729 enum memmodel failure)
6730 EXP is the CALL_EXPR.
6731 TARGET is an optional place for us to store the results. */
6733 static rtx
6734 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6735 rtx target)
6737 rtx expect, desired, mem, oldval;
6738 rtx_code_label *label;
6739 enum memmodel success, failure;
6740 tree weak;
6741 bool is_weak;
6742 location_t loc
6743 = expansion_point_location_if_in_system_header (input_location);
6745 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6746 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6748 if (failure > success)
6750 warning_at (loc, OPT_Winvalid_memory_model,
6751 "failure memory model cannot be stronger than success "
6752 "memory model for %<__atomic_compare_exchange%>");
6753 success = MEMMODEL_SEQ_CST;
6756 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6758 warning_at (loc, OPT_Winvalid_memory_model,
6759 "invalid failure memory model for "
6760 "%<__atomic_compare_exchange%>");
6761 failure = MEMMODEL_SEQ_CST;
6762 success = MEMMODEL_SEQ_CST;
6766 if (!flag_inline_atomics)
6767 return NULL_RTX;
6769 /* Expand the operands. */
6770 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6772 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6773 expect = convert_memory_address (Pmode, expect);
6774 expect = gen_rtx_MEM (mode, expect);
6775 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6777 weak = CALL_EXPR_ARG (exp, 3);
6778 is_weak = false;
6779 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6780 is_weak = true;
6782 if (target == const0_rtx)
6783 target = NULL;
6785 /* Lest the rtl backend create a race condition with an improper store
6786 to memory, always create a new pseudo for OLDVAL. */
6787 oldval = NULL;
6789 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6790 is_weak, success, failure))
6791 return NULL_RTX;
6793 /* Conditionally store back to EXPECT, lest we create a race condition
6794 with an improper store to memory. */
6795 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6796 the normal case where EXPECT is totally private, i.e. a register. At
6797 which point the store can be unconditional. */
6798 label = gen_label_rtx ();
6799 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6800 GET_MODE (target), 1, label);
6801 emit_move_insn (expect, oldval);
6802 emit_label (label);
6804 return target;
6807 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6808 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6809 call. The weak parameter must be dropped to match the expected parameter
6810 list and the expected argument changed from value to pointer to memory
6811 slot. */
6813 static void
6814 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6816 unsigned int z;
6817 vec<tree, va_gc> *vec;
6819 vec_alloc (vec, 5);
6820 vec->quick_push (gimple_call_arg (call, 0));
6821 tree expected = gimple_call_arg (call, 1);
6822 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6823 TREE_TYPE (expected));
6824 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6825 if (expd != x)
6826 emit_move_insn (x, expd);
6827 tree v = make_tree (TREE_TYPE (expected), x);
6828 vec->quick_push (build1 (ADDR_EXPR,
6829 build_pointer_type (TREE_TYPE (expected)), v));
6830 vec->quick_push (gimple_call_arg (call, 2));
6831 /* Skip the boolean weak parameter. */
6832 for (z = 4; z < 6; z++)
6833 vec->quick_push (gimple_call_arg (call, z));
6834 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6835 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6836 gcc_assert (bytes_log2 < 5);
6837 built_in_function fncode
6838 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6839 + bytes_log2);
6840 tree fndecl = builtin_decl_explicit (fncode);
6841 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6842 fndecl);
6843 tree exp = build_call_vec (boolean_type_node, fn, vec);
6844 tree lhs = gimple_call_lhs (call);
6845 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6846 if (lhs)
6848 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6849 if (GET_MODE (boolret) != mode)
6850 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6851 x = force_reg (mode, x);
6852 write_complex_part (target, boolret, true);
6853 write_complex_part (target, x, false);
6857 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6859 void
6860 expand_ifn_atomic_compare_exchange (gcall *call)
6862 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6863 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6864 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6865 rtx expect, desired, mem, oldval, boolret;
6866 enum memmodel success, failure;
6867 tree lhs;
6868 bool is_weak;
6869 location_t loc
6870 = expansion_point_location_if_in_system_header (gimple_location (call));
6872 success = get_memmodel (gimple_call_arg (call, 4));
6873 failure = get_memmodel (gimple_call_arg (call, 5));
6875 if (failure > success)
6877 warning_at (loc, OPT_Winvalid_memory_model,
6878 "failure memory model cannot be stronger than success "
6879 "memory model for %<__atomic_compare_exchange%>");
6880 success = MEMMODEL_SEQ_CST;
6883 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6885 warning_at (loc, OPT_Winvalid_memory_model,
6886 "invalid failure memory model for "
6887 "%<__atomic_compare_exchange%>");
6888 failure = MEMMODEL_SEQ_CST;
6889 success = MEMMODEL_SEQ_CST;
6892 if (!flag_inline_atomics)
6894 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6895 return;
6898 /* Expand the operands. */
6899 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6901 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6902 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6904 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6906 boolret = NULL;
6907 oldval = NULL;
6909 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6910 is_weak, success, failure))
6912 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6913 return;
6916 lhs = gimple_call_lhs (call);
6917 if (lhs)
6919 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6920 if (GET_MODE (boolret) != mode)
6921 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6922 write_complex_part (target, boolret, true);
6923 write_complex_part (target, oldval, false);
6927 /* Expand the __atomic_load intrinsic:
6928 TYPE __atomic_load (TYPE *object, enum memmodel)
6929 EXP is the CALL_EXPR.
6930 TARGET is an optional place for us to store the results. */
6932 static rtx
6933 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6935 rtx mem;
6936 enum memmodel model;
6938 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6939 if (is_mm_release (model) || is_mm_acq_rel (model))
6941 location_t loc
6942 = expansion_point_location_if_in_system_header (input_location);
6943 warning_at (loc, OPT_Winvalid_memory_model,
6944 "invalid memory model for %<__atomic_load%>");
6945 model = MEMMODEL_SEQ_CST;
6948 if (!flag_inline_atomics)
6949 return NULL_RTX;
6951 /* Expand the operand. */
6952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6954 return expand_atomic_load (target, mem, model);
6958 /* Expand the __atomic_store intrinsic:
6959 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6960 EXP is the CALL_EXPR.
6961 TARGET is an optional place for us to store the results. */
6963 static rtx
6964 expand_builtin_atomic_store (machine_mode mode, tree exp)
6966 rtx mem, val;
6967 enum memmodel model;
6969 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6970 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6971 || is_mm_release (model)))
6973 location_t loc
6974 = expansion_point_location_if_in_system_header (input_location);
6975 warning_at (loc, OPT_Winvalid_memory_model,
6976 "invalid memory model for %<__atomic_store%>");
6977 model = MEMMODEL_SEQ_CST;
6980 if (!flag_inline_atomics)
6981 return NULL_RTX;
6983 /* Expand the operands. */
6984 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6985 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6987 return expand_atomic_store (mem, val, model, false);
6990 /* Expand the __atomic_fetch_XXX intrinsic:
6991 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6992 EXP is the CALL_EXPR.
6993 TARGET is an optional place for us to store the results.
6994 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6995 FETCH_AFTER is true if returning the result of the operation.
6996 FETCH_AFTER is false if returning the value before the operation.
6997 IGNORE is true if the result is not used.
6998 EXT_CALL is the correct builtin for an external call if this cannot be
6999 resolved to an instruction sequence. */
7001 static rtx
7002 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7003 enum rtx_code code, bool fetch_after,
7004 bool ignore, enum built_in_function ext_call)
7006 rtx val, mem, ret;
7007 enum memmodel model;
7008 tree fndecl;
7009 tree addr;
7011 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7013 /* Expand the operands. */
7014 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7015 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7017 /* Only try generating instructions if inlining is turned on. */
7018 if (flag_inline_atomics)
7020 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7021 if (ret)
7022 return ret;
7025 /* Return if a different routine isn't needed for the library call. */
7026 if (ext_call == BUILT_IN_NONE)
7027 return NULL_RTX;
7029 /* Change the call to the specified function. */
7030 fndecl = get_callee_fndecl (exp);
7031 addr = CALL_EXPR_FN (exp);
7032 STRIP_NOPS (addr);
7034 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7035 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7037 /* If we will emit code after the call, the call cannot be a tail call.
7038 If it is emitted as a tail call, a barrier is emitted after it, and
7039 then all trailing code is removed. */
7040 if (!ignore)
7041 CALL_EXPR_TAILCALL (exp) = 0;
7043 /* Expand the call here so we can emit trailing code. */
7044 ret = expand_call (exp, target, ignore);
7046 /* Replace the original function just in case it matters. */
7047 TREE_OPERAND (addr, 0) = fndecl;
7049 /* Then issue the arithmetic correction to return the right result. */
7050 if (!ignore)
7052 if (code == NOT)
7054 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7055 OPTAB_LIB_WIDEN);
7056 ret = expand_simple_unop (mode, NOT, ret, target, true);
7058 else
7059 ret = expand_simple_binop (mode, code, ret, val, target, true,
7060 OPTAB_LIB_WIDEN);
7062 return ret;
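/* A sketch of the correction above for CODE == NOT: when the library
   call returns only the fetched (old) value but the caller asked for
   the result of the operation, the return value RET is fixed up as

     ret = ~(ret & val);

   via the expand_simple_binop/expand_simple_unop pair emitted above.  */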
7065 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7067 void
7068 expand_ifn_atomic_bit_test_and (gcall *call)
7070 tree ptr = gimple_call_arg (call, 0);
7071 tree bit = gimple_call_arg (call, 1);
7072 tree flag = gimple_call_arg (call, 2);
7073 tree lhs = gimple_call_lhs (call);
7074 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7075 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7076 enum rtx_code code;
7077 optab optab;
7078 class expand_operand ops[5];
7080 gcc_assert (flag_inline_atomics);
7082 if (gimple_call_num_args (call) == 4)
7083 model = get_memmodel (gimple_call_arg (call, 3));
7085 rtx mem = get_builtin_sync_mem (ptr, mode);
7086 rtx val = expand_expr_force_mode (bit, mode);
7088 switch (gimple_call_internal_fn (call))
7090 case IFN_ATOMIC_BIT_TEST_AND_SET:
7091 code = IOR;
7092 optab = atomic_bit_test_and_set_optab;
7093 break;
7094 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7095 code = XOR;
7096 optab = atomic_bit_test_and_complement_optab;
7097 break;
7098 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7099 code = AND;
7100 optab = atomic_bit_test_and_reset_optab;
7101 break;
7102 default:
7103 gcc_unreachable ();
7106 if (lhs == NULL_TREE)
7108 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7109 val, NULL_RTX, true, OPTAB_DIRECT);
7110 if (code == AND)
7111 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7112 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7113 return;
7116 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7117 enum insn_code icode = direct_optab_handler (optab, mode);
7118 gcc_assert (icode != CODE_FOR_nothing);
7119 create_output_operand (&ops[0], target, mode);
7120 create_fixed_operand (&ops[1], mem);
7121 create_convert_operand_to (&ops[2], val, mode, true);
7122 create_integer_operand (&ops[3], model);
7123 create_integer_operand (&ops[4], integer_onep (flag));
7124 if (maybe_expand_insn (icode, 5, ops))
7125 return;
7127 rtx bitval = val;
7128 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7129 val, NULL_RTX, true, OPTAB_DIRECT);
7130 rtx maskval = val;
7131 if (code == AND)
7132 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7133 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7134 code, model, false);
7135 if (integer_onep (flag))
7137 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7138 NULL_RTX, true, OPTAB_DIRECT);
7139 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7140 true, OPTAB_DIRECT);
7142 else
7143 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7144 OPTAB_DIRECT);
7145 if (result != target)
7146 emit_move_insn (target, result);
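/* The internal function expanded above is matched earlier from source
   shapes roughly like this (a sketch; the function name is hypothetical
   and the __atomic builtins are assumed):

     _Bool
     set_bit_and_test (unsigned *word, unsigned bit)
     {
       unsigned mask = 1u << bit;
       return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
     }

   When the target provides atomic_bit_test_and_set_optab the fetched
   word is never materialized; otherwise the fallback above emits the
   fetch_op followed by the shift or mask sequence.  */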
7149 /* Expand an atomic clear operation.
7150 void __atomic_clear (BOOL *obj, enum memmodel)
7151 EXP is the call expression. */
7153 static rtx
7154 expand_builtin_atomic_clear (tree exp)
7156 machine_mode mode;
7157 rtx mem, ret;
7158 enum memmodel model;
7160 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7161 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7162 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7164 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7166 location_t loc
7167 = expansion_point_location_if_in_system_header (input_location);
7168 warning_at (loc, OPT_Winvalid_memory_model,
7169 "invalid memory model for %<__atomic_store%>");
7170 model = MEMMODEL_SEQ_CST;
7173 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
7174 If neither can be expanded, issue a plain store below. The only way this
7175 can happen is if the bool type is larger than a word size. Unlikely, but
7176 handle it anyway for completeness. Assume a single threaded model since
7177 there is no atomic support in this case, and no barriers are required. */
7178 ret = expand_atomic_store (mem, const0_rtx, model, true);
7179 if (!ret)
7180 emit_move_insn (mem, const0_rtx);
7181 return const0_rtx;
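/* Usage sketch (illustrative; assumes the __atomic builtins): the typical
   pairing is releasing a flag set by __atomic_test_and_set:

     static _Bool lock;

     void
     spin_unlock (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }

   Consume, acquire and acq_rel make no sense for a pure store, hence the
   diagnostic above.  */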
7184 /* Expand an atomic test_and_set operation.
7185 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7186 EXP is the call expression. */
7188 static rtx
7189 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7191 rtx mem;
7192 enum memmodel model;
7193 machine_mode mode;
7195 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7197 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7199 return expand_atomic_test_and_set (target, mem, model);
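/* Companion sketch to __atomic_clear above (illustrative; assumes the
   __atomic builtins):

     static _Bool lock;

     void
     spin_lock (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;
     }

   expand_atomic_test_and_set itself handles the fallback chain: the
   direct pattern, then an exchange or compare-and-swap loop, and
   finally a library call.  */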
7203 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7204 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7206 static tree
7207 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7209 int size;
7210 machine_mode mode;
7211 unsigned int mode_align, type_align;
7213 if (TREE_CODE (arg0) != INTEGER_CST)
7214 return NULL_TREE;
7216 /* We need a corresponding integer mode for the access to be lock-free. */
7217 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7218 if (!int_mode_for_size (size, 0).exists (&mode))
7219 return boolean_false_node;
7221 mode_align = GET_MODE_ALIGNMENT (mode);
7223 if (TREE_CODE (arg1) == INTEGER_CST)
7225 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7227 /* Either this argument is null, or it's a fake pointer encoding
7228 the alignment of the object. */
7229 val = least_bit_hwi (val);
7230 val *= BITS_PER_UNIT;
7232 if (val == 0 || mode_align < val)
7233 type_align = mode_align;
7234 else
7235 type_align = val;
7237 else
7239 tree ttype = TREE_TYPE (arg1);
7241 /* This function is usually invoked and folded immediately by the front
7242 end before anything else has a chance to look at it. The pointer
7243 parameter at this point is usually cast to a void *, so check for that
7244 and look past the cast. */
7245 if (CONVERT_EXPR_P (arg1)
7246 && POINTER_TYPE_P (ttype)
7247 && VOID_TYPE_P (TREE_TYPE (ttype))
7248 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7249 arg1 = TREE_OPERAND (arg1, 0);
7251 ttype = TREE_TYPE (arg1);
7252 gcc_assert (POINTER_TYPE_P (ttype));
7254 /* Get the underlying type of the object. */
7255 ttype = TREE_TYPE (ttype);
7256 type_align = TYPE_ALIGN (ttype);
7259 /* If the object has smaller alignment, the lock free routines cannot
7260 be used. */
7261 if (type_align < mode_align)
7262 return boolean_false_node;
7264 /* Check if a compare_and_swap pattern exists for the mode which represents
7265 the required size. The pattern is not allowed to fail, so the existence
7266 of the pattern indicates support is present. Also require that an
7267 atomic load exists for the required size. */
7268 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7269 return boolean_true_node;
7270 else
7271 return boolean_false_node;
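/* Folding sketch (illustrative): on a target with a 32-bit compare-and-swap
   and atomic load,

     __atomic_always_lock_free (4, 0)

   folds to true, whereas a fake pointer encoding only 4-byte alignment
   for an 8-byte access,

     __atomic_always_lock_free (8, (void *) 4)

   is rejected by the type_align < mode_align test above on targets where
   the 8-byte integer mode requires 8-byte alignment.  */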
7274 /* Return const1_rtx if the parameters to call EXP represent an object which
7275 will always generate lock free instructions. The first argument represents
7276 the size of the object, and the second parameter is a pointer to the object
7277 itself. If NULL is passed for the object, then the result is based on
7278 typical alignment for an object of the specified size. Otherwise return
7279 const0_rtx. */
7281 static rtx
7282 expand_builtin_atomic_always_lock_free (tree exp)
7284 tree size;
7285 tree arg0 = CALL_EXPR_ARG (exp, 0);
7286 tree arg1 = CALL_EXPR_ARG (exp, 1);
7288 if (TREE_CODE (arg0) != INTEGER_CST)
7290 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7291 return const0_rtx;
7294 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7295 if (size == boolean_true_node)
7296 return const1_rtx;
7297 return const0_rtx;
7300 /* Return boolean_true_node if it can be determined that object ARG1 of size
7301 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
7303 static tree
7304 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7306 if (!flag_inline_atomics)
7307 return NULL_TREE;
7309 /* If it isn't always lock free, don't generate a result. */
7310 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7311 return boolean_true_node;
7313 return NULL_TREE;
7316 /* Return const1_rtx if the parameters to call EXP represent an object which
7317 is known at compile time to be lock free. The first argument represents
7318 the size of the object, and the second parameter is a pointer to the
7319 object itself. If NULL is passed for the object, then the result is
7320 based on typical alignment for an object of the specified size.
7321 Otherwise return NULL_RTX, so that a library call is emitted. */
7323 static rtx
7324 expand_builtin_atomic_is_lock_free (tree exp)
7326 tree size;
7327 tree arg0 = CALL_EXPR_ARG (exp, 0);
7328 tree arg1 = CALL_EXPR_ARG (exp, 1);
7330 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7332 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7333 return NULL_RTX;
7336 if (!flag_inline_atomics)
7337 return NULL_RTX;
7339 /* If the value is known at compile time, return the RTX for it. */
7340 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7341 if (size == boolean_true_node)
7342 return const1_rtx;
7344 return NULL_RTX;
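/* Sketch of the difference from __atomic_always_lock_free (illustrative;
   assumes the __atomic builtins and libatomic):

     _Bool
     q (int *p)
     {
       return __atomic_is_lock_free (sizeof (*p), p);
     }

   folds to 1 when the always-lock-free test succeeds; otherwise NULL_RTX
   is returned above and the call is resolved at run time by libatomic's
   __atomic_is_lock_free, which can consult the actual address.  */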
7347 /* Expand the __atomic_thread_fence intrinsic:
7348 void __atomic_thread_fence (enum memmodel)
7349 EXP is the CALL_EXPR. */
7351 static void
7352 expand_builtin_atomic_thread_fence (tree exp)
7354 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7355 expand_mem_thread_fence (model);
7358 /* Expand the __atomic_signal_fence intrinsic:
7359 void __atomic_signal_fence (enum memmodel)
7360 EXP is the CALL_EXPR. */
7362 static void
7363 expand_builtin_atomic_signal_fence (tree exp)
7365 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7366 expand_mem_signal_fence (model);
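/* Fence sketch (illustrative; assumes the __atomic builtins): a thread
   fence must order memory against other threads and may emit a machine
   barrier, while a signal fence only constrains the compiler with
   respect to a signal handler running in the same thread:

     __atomic_thread_fence (__ATOMIC_SEQ_CST);
     __atomic_signal_fence (__ATOMIC_SEQ_CST);

   so the second typically expands to no instruction at all.  */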
7369 /* Expand the __sync_synchronize intrinsic. */
7371 static void
7372 expand_builtin_sync_synchronize (void)
7374 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7377 static rtx
7378 expand_builtin_thread_pointer (tree exp, rtx target)
7380 enum insn_code icode;
7381 if (!validate_arglist (exp, VOID_TYPE))
7382 return const0_rtx;
7383 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7384 if (icode != CODE_FOR_nothing)
7386 class expand_operand op;
7387 /* If the target is not suitable, create a new one. */
7388 if (target == NULL_RTX
7389 || !REG_P (target)
7390 || GET_MODE (target) != Pmode)
7391 target = gen_reg_rtx (Pmode);
7392 create_output_operand (&op, target, Pmode);
7393 expand_insn (icode, 1, &op);
7394 return target;
7396 error ("%<__builtin_thread_pointer%> is not supported on this target");
7397 return const0_rtx;
7400 static void
7401 expand_builtin_set_thread_pointer (tree exp)
7403 enum insn_code icode;
7404 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7405 return;
7406 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7407 if (icode != CODE_FOR_nothing)
7409 class expand_operand op;
7410 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7411 Pmode, EXPAND_NORMAL);
7412 create_input_operand (&op, val, Pmode);
7413 expand_insn (icode, 1, &op);
7414 return;
7416 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7422 /* Emit code to restore a previously saved value of the stack pointer. */
7422 static void
7423 expand_stack_restore (tree var)
7425 rtx_insn *prev;
7426 rtx sa = expand_normal (var);
7428 sa = convert_memory_address (Pmode, sa);
7430 prev = get_last_insn ();
7431 emit_stack_restore (SAVE_BLOCK, sa);
7433 record_new_stack_level ();
7435 fixup_args_size_notes (prev, get_last_insn (), 0);
7438 /* Emit code to save the current value of the stack pointer. */
7440 static rtx
7441 expand_stack_save (void)
7443 rtx ret = NULL_RTX;
7445 emit_stack_save (SAVE_BLOCK, &ret);
7446 return ret;
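/* These two expanders implement the save/restore pairing the gimplifier
   wraps around variable-sized scopes; a source-level sketch
   (illustrative; USE is a placeholder for any consumer):

     void
     f (int n)
     {
       {
         char buf[n];
         use (buf);
       }
     }

   with the stack pointer saved on entry to the inner block and restored
   on exit, releasing BUF's storage.  */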
7449 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7451 static rtx
7452 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7454 const char *name;
7455 rtx fallback_retval;
7456 rtx_insn *(*gen_fn) (rtx, rtx);
7457 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7459 case BUILT_IN_GOACC_PARLEVEL_ID:
7460 name = "__builtin_goacc_parlevel_id";
7461 fallback_retval = const0_rtx;
7462 gen_fn = targetm.gen_oacc_dim_pos;
7463 break;
7464 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7465 name = "__builtin_goacc_parlevel_size";
7466 fallback_retval = const1_rtx;
7467 gen_fn = targetm.gen_oacc_dim_size;
7468 break;
7469 default:
7470 gcc_unreachable ();
7473 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7475 error ("%qs only supported in OpenACC code", name);
7476 return const0_rtx;
7479 tree arg = CALL_EXPR_ARG (exp, 0);
7480 if (TREE_CODE (arg) != INTEGER_CST)
7482 error ("non-constant argument 0 to %qs", name);
7483 return const0_rtx;
7486 int dim = TREE_INT_CST_LOW (arg);
7487 switch (dim)
7489 case GOMP_DIM_GANG:
7490 case GOMP_DIM_WORKER:
7491 case GOMP_DIM_VECTOR:
7492 break;
7493 default:
7494 error ("illegal argument 0 to %qs", name);
7495 return const0_rtx;
7498 if (ignore)
7499 return target;
7501 if (target == NULL_RTX)
7502 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7504 if (!targetm.have_oacc_dim_size ())
7506 emit_move_insn (target, fallback_retval);
7507 return target;
7510 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7511 emit_insn (gen_fn (reg, GEN_INT (dim)));
7512 if (reg != target)
7513 emit_move_insn (target, reg);
7515 return target;
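/* Usage sketch (illustrative; assumes an OpenACC compilation):

     #pragma acc parallel loop
     for (int i = 0; i < n; i++)
       workers[i] = __builtin_goacc_parlevel_id (GOMP_DIM_WORKER);

   Outside OpenACC code the builtin is rejected with the error above; on
   hosts without oacc_dim_size support the id falls back to 0 and the
   size to 1, matching single-gang, single-worker execution.  */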
7518 /* Expand a string compare operation using a sequence of char comparisons
7519 to get rid of the calling overhead, with result going to TARGET if
7520 that's convenient.
7522 VAR_STR is the variable string source;
7523 CONST_STR is the constant string source;
7524 LENGTH is the number of chars to compare;
7525 CONST_STR_N indicates which source string is the constant string;
7526 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7528 The expanded sequence is (assuming CONST_STR_N is 2, i.e., arg2 is the constant string):
7530 target = (int) (unsigned char) var_str[0]
7531 - (int) (unsigned char) const_str[0];
7532 if (target != 0)
7533 goto ne_label;
7534 ...
7535 target = (int) (unsigned char) var_str[length - 2]
7536 - (int) (unsigned char) const_str[length - 2];
7537 if (target != 0)
7538 goto ne_label;
7539 target = (int) (unsigned char) var_str[length - 1]
7540 - (int) (unsigned char) const_str[length - 1];
7541 ne_label:
7544 static rtx
7545 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7546 unsigned HOST_WIDE_INT length,
7547 int const_str_n, machine_mode mode)
7549 HOST_WIDE_INT offset = 0;
7550 rtx var_rtx_array
7551 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7552 rtx var_rtx = NULL_RTX;
7553 rtx const_rtx = NULL_RTX;
7554 rtx result = target ? target : gen_reg_rtx (mode);
7555 rtx_code_label *ne_label = gen_label_rtx ();
7556 tree unit_type_node = unsigned_char_type_node;
7557 scalar_int_mode unit_mode
7558 = as_a <scalar_int_mode> (TYPE_MODE (unit_type_node));
7560 start_sequence ();
7562 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7564 var_rtx
7565 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7566 const_rtx = c_readstr (const_str + offset, unit_mode);
7567 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7568 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7570 op0 = convert_modes (mode, unit_mode, op0, 1);
7571 op1 = convert_modes (mode, unit_mode, op1, 1);
7572 result = expand_simple_binop (mode, MINUS, op0, op1,
7573 result, 1, OPTAB_WIDEN);
7574 if (i < length - 1)
7575 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7576 mode, true, ne_label);
7577 offset += GET_MODE_SIZE (unit_mode);
7580 emit_label (ne_label);
7581 rtx_insn *insns = get_insns ();
7582 end_sequence ();
7583 emit_insn (insns);
7585 return result;
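/* Concrete instance of the expansion above (illustrative): with
   CONST_STR_N == 2 and LENGTH == 3, a call such as strcmp (s, "ab")
   becomes, in source terms,

     r = (int) (unsigned char) s[0] - (int) (unsigned char) 'a';
     if (r != 0) goto ne;
     r = (int) (unsigned char) s[1] - (int) (unsigned char) 'b';
     if (r != 0) goto ne;
     r = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
   ne:

   with the terminating NUL participating because the caller counts it
   in LENGTH for strcmp.  */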
7588 /* Inline expansion of a call to str(n)cmp, with result going to
7589 TARGET if that's convenient.
7590 If the call is not inlined, return NULL_RTX. */
7591 static rtx
7592 inline_expand_builtin_string_cmp (tree exp, rtx target)
7594 tree fndecl = get_callee_fndecl (exp);
7595 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7596 unsigned HOST_WIDE_INT length = 0;
7597 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7599 /* Do NOT apply this inline expansion when optimizing for size or
7600 when the optimization level is below 2. */
7601 if (optimize < 2 || optimize_insn_for_size_p ())
7602 return NULL_RTX;
7604 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7605 || fcode == BUILT_IN_STRNCMP
7606 || fcode == BUILT_IN_MEMCMP);
7608 /* On a target where the type of the call (int) has the same or narrower
7609 precision than unsigned char, give up on the inline expansion. */
7610 if (TYPE_PRECISION (unsigned_char_type_node)
7611 >= TYPE_PRECISION (TREE_TYPE (exp)))
7612 return NULL_RTX;
7614 tree arg1 = CALL_EXPR_ARG (exp, 0);
7615 tree arg2 = CALL_EXPR_ARG (exp, 1);
7616 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7618 unsigned HOST_WIDE_INT len1 = 0;
7619 unsigned HOST_WIDE_INT len2 = 0;
7620 unsigned HOST_WIDE_INT len3 = 0;
7622 const char *src_str1 = c_getstr (arg1, &len1);
7623 const char *src_str2 = c_getstr (arg2, &len2);
7625 /* If neither string is a constant string, the call does not qualify. */
7626 if (!src_str1 && !src_str2)
7627 return NULL_RTX;
7629 /* For strncmp, if the length is not a constant, the call does not qualify. */
7630 if (is_ncmp)
7632 if (!tree_fits_uhwi_p (len3_tree))
7633 return NULL_RTX;
7634 else
7635 len3 = tree_to_uhwi (len3_tree);
7638 if (src_str1 != NULL)
7639 len1 = strnlen (src_str1, len1) + 1;
7641 if (src_str2 != NULL)
7642 len2 = strnlen (src_str2, len2) + 1;
7644 int const_str_n = 0;
7645 if (!len1)
7646 const_str_n = 2;
7647 else if (!len2)
7648 const_str_n = 1;
7649 else if (len2 > len1)
7650 const_str_n = 1;
7651 else
7652 const_str_n = 2;
7654 gcc_checking_assert (const_str_n > 0);
7655 length = (const_str_n == 1) ? len1 : len2;
7657 if (is_ncmp && len3 < length)
7658 length = len3;
7660 /* If the length of the comparison is larger than the threshold,
7661 do nothing. */
7662 if (length > (unsigned HOST_WIDE_INT)
7663 param_builtin_string_cmp_inline_length)
7664 return NULL_RTX;
7666 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7668 /* Now, start the inline expansion of the call. */
7669 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7670 (const_str_n == 1) ? src_str1 : src_str2, length,
7671 const_str_n, mode);
7674 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7675 represents the size of the first argument to that call, or VOIDmode
7676 if the argument is a pointer. IGNORE will be true if the result
7677 isn't used. */
7678 static rtx
7679 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7680 bool ignore)
7682 rtx val, failsafe;
7683 unsigned nargs = call_expr_nargs (exp);
7685 tree arg0 = CALL_EXPR_ARG (exp, 0);
7687 if (mode == VOIDmode)
7689 mode = TYPE_MODE (TREE_TYPE (arg0));
7690 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7693 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7695 /* An optional second argument can be used as a failsafe value on
7696 some machines. If it isn't present, then the failsafe value is
7697 assumed to be 0. */
7698 if (nargs > 1)
7700 tree arg1 = CALL_EXPR_ARG (exp, 1);
7701 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7703 else
7704 failsafe = const0_rtx;
7706 /* If the result isn't used, the behavior is undefined. It would be
7707 nice to emit a warning here, but path splitting means this might
7708 happen with legitimate code. So simply drop the builtin
7709 expansion in that case; we've handled any side-effects above. */
7710 if (ignore)
7711 return const0_rtx;
7713 /* If we don't have a suitable target, create one to hold the result. */
7714 if (target == NULL || GET_MODE (target) != mode)
7715 target = gen_reg_rtx (mode);
7717 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7718 val = convert_modes (mode, VOIDmode, val, false);
7720 return targetm.speculation_safe_value (mode, target, val, failsafe);
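/* Mitigation sketch (illustrative; the function name is hypothetical):
   the canonical use guards a value whose bounds check may be bypassed
   under speculative execution (Spectre variant 1):

     int
     load_checked (const int *array, unsigned len, unsigned idx)
     {
       if (idx < len)
         return array[__builtin_speculation_safe_value (idx, 0)];
       return 0;
     }

   On targets without a speculation barrier the hook degenerates to a
   plain copy of VAL, so the code stays correct, merely unhardened.  */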
7723 /* Expand an expression EXP that calls a built-in function,
7724 with result going to TARGET if that's convenient
7725 (and in mode MODE if that's convenient).
7726 SUBTARGET may be used as the target for computing one of EXP's operands.
7727 IGNORE is nonzero if the value is to be ignored. */
7729 rtx
7730 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7731 int ignore)
7733 tree fndecl = get_callee_fndecl (exp);
7734 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7735 int flags;
7737 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7738 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7740 /* When ASan is enabled, we don't want to expand some memory/string
7741 builtins and rely on libsanitizer's hooks. This allows us to avoid
7742 redundant checks and be sure that possible overflows will be detected
7743 by ASan. */
7745 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7746 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7747 return expand_call (exp, target, ignore);
7749 /* When not optimizing, generate calls to library functions for a certain
7750 set of builtins. */
7751 if (!optimize
7752 && !called_as_built_in (fndecl)
7753 && fcode != BUILT_IN_FORK
7754 && fcode != BUILT_IN_EXECL
7755 && fcode != BUILT_IN_EXECV
7756 && fcode != BUILT_IN_EXECLP
7757 && fcode != BUILT_IN_EXECLE
7758 && fcode != BUILT_IN_EXECVP
7759 && fcode != BUILT_IN_EXECVE
7760 && !ALLOCA_FUNCTION_CODE_P (fcode)
7761 && fcode != BUILT_IN_FREE)
7762 return expand_call (exp, target, ignore);
7764 /* The built-in function expanders test for target == const0_rtx
7765 to determine whether the function's result will be ignored. */
7766 if (ignore)
7767 target = const0_rtx;
7769 /* If the result of a pure or const built-in function is ignored, and
7770 none of its arguments are volatile, we can avoid expanding the
7771 built-in call and just evaluate the arguments for side-effects. */
7772 if (target == const0_rtx
7773 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7774 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7776 bool volatilep = false;
7777 tree arg;
7778 call_expr_arg_iterator iter;
7780 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7781 if (TREE_THIS_VOLATILE (arg))
7783 volatilep = true;
7784 break;
7787 if (! volatilep)
7789 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7790 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7791 return const0_rtx;
7795 switch (fcode)
7797 CASE_FLT_FN (BUILT_IN_FABS):
7798 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7799 case BUILT_IN_FABSD32:
7800 case BUILT_IN_FABSD64:
7801 case BUILT_IN_FABSD128:
7802 target = expand_builtin_fabs (exp, target, subtarget);
7803 if (target)
7804 return target;
7805 break;
7807 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7808 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7809 target = expand_builtin_copysign (exp, target, subtarget);
7810 if (target)
7811 return target;
7812 break;
7814 /* Just do a normal library call if we were unable to fold
7815 the values. */
7816 CASE_FLT_FN (BUILT_IN_CABS):
7817 break;
7819 CASE_FLT_FN (BUILT_IN_FMA):
7820 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7821 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7822 if (target)
7823 return target;
7824 break;
7826 CASE_FLT_FN (BUILT_IN_ILOGB):
7827 if (! flag_unsafe_math_optimizations)
7828 break;
7829 gcc_fallthrough ();
7830 CASE_FLT_FN (BUILT_IN_ISINF):
7831 CASE_FLT_FN (BUILT_IN_FINITE):
7832 case BUILT_IN_ISFINITE:
7833 case BUILT_IN_ISNORMAL:
7834 target = expand_builtin_interclass_mathfn (exp, target);
7835 if (target)
7836 return target;
7837 break;
7839 CASE_FLT_FN (BUILT_IN_ICEIL):
7840 CASE_FLT_FN (BUILT_IN_LCEIL):
7841 CASE_FLT_FN (BUILT_IN_LLCEIL):
7842 CASE_FLT_FN (BUILT_IN_LFLOOR):
7843 CASE_FLT_FN (BUILT_IN_IFLOOR):
7844 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7845 target = expand_builtin_int_roundingfn (exp, target);
7846 if (target)
7847 return target;
7848 break;
7850 CASE_FLT_FN (BUILT_IN_IRINT):
7851 CASE_FLT_FN (BUILT_IN_LRINT):
7852 CASE_FLT_FN (BUILT_IN_LLRINT):
7853 CASE_FLT_FN (BUILT_IN_IROUND):
7854 CASE_FLT_FN (BUILT_IN_LROUND):
7855 CASE_FLT_FN (BUILT_IN_LLROUND):
7856 target = expand_builtin_int_roundingfn_2 (exp, target);
7857 if (target)
7858 return target;
7859 break;
7861 CASE_FLT_FN (BUILT_IN_POWI):
7862 target = expand_builtin_powi (exp, target);
7863 if (target)
7864 return target;
7865 break;
7867 CASE_FLT_FN (BUILT_IN_CEXPI):
7868 target = expand_builtin_cexpi (exp, target);
7869 gcc_assert (target);
7870 return target;
7872 CASE_FLT_FN (BUILT_IN_SIN):
7873 CASE_FLT_FN (BUILT_IN_COS):
7874 if (! flag_unsafe_math_optimizations)
7875 break;
7876 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7877 if (target)
7878 return target;
7879 break;
7881 CASE_FLT_FN (BUILT_IN_SINCOS):
7882 if (! flag_unsafe_math_optimizations)
7883 break;
7884 target = expand_builtin_sincos (exp);
7885 if (target)
7886 return target;
7887 break;
7889 case BUILT_IN_APPLY_ARGS:
7890 return expand_builtin_apply_args ();
7892 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7893 FUNCTION with a copy of the parameters described by
7894 ARGUMENTS, and ARGSIZE. It returns a block of memory
7895 allocated on the stack into which is stored all the registers
7896 that might possibly be used for returning the result of a
7897 function. ARGUMENTS is the value returned by
7898 __builtin_apply_args. ARGSIZE is the number of bytes of
7899 arguments that must be copied. ??? How should this value be
7900 computed? We'll also need a safe worst case value for varargs
7901 functions. */
7902 case BUILT_IN_APPLY:
7903 if (!validate_arglist (exp, POINTER_TYPE,
7904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7905 && !validate_arglist (exp, REFERENCE_TYPE,
7906 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7907 return const0_rtx;
7908 else
7910 rtx ops[3];
7912 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7913 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7914 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7916 return expand_builtin_apply (ops[0], ops[1], ops[2]);
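/* Forwarding sketch combining this builtin with __builtin_apply_args and
   __builtin_return below (illustrative; TARGET_FN and the 64-byte
   argument-size guess are placeholders):

     void
     wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   As the comment above notes, computing ARGSIZE safely is the open
   problem.  */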
7919 /* __builtin_return (RESULT) causes the function to return the
7920 value described by RESULT. RESULT is the address of the block of
7921 memory returned by __builtin_apply. */
7922 case BUILT_IN_RETURN:
7923 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7924 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7925 return const0_rtx;
7927 case BUILT_IN_SAVEREGS:
7928 return expand_builtin_saveregs ();
7930 case BUILT_IN_VA_ARG_PACK:
7931 /* All valid uses of __builtin_va_arg_pack () are removed during
7932 inlining. */
7933 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7934 return const0_rtx;
7936 case BUILT_IN_VA_ARG_PACK_LEN:
7937 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7938 inlining. */
7939 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7940 return const0_rtx;
7942 /* Return the address of the first anonymous stack arg. */
7943 case BUILT_IN_NEXT_ARG:
7944 if (fold_builtin_next_arg (exp, false))
7945 return const0_rtx;
7946 return expand_builtin_next_arg ();
7948 case BUILT_IN_CLEAR_CACHE:
7949 target = expand_builtin___clear_cache (exp);
7950 if (target)
7951 return target;
7952 break;
7954 case BUILT_IN_CLASSIFY_TYPE:
7955 return expand_builtin_classify_type (exp);
7957 case BUILT_IN_CONSTANT_P:
7958 return const0_rtx;
7960 case BUILT_IN_FRAME_ADDRESS:
7961 case BUILT_IN_RETURN_ADDRESS:
7962 return expand_builtin_frame_address (fndecl, exp);
7964 /* Returns the address of the area where the structure is returned.
7965 0 otherwise. */
7966 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7967 if (call_expr_nargs (exp) != 0
7968 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7969 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7970 return const0_rtx;
7971 else
7972 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7974 CASE_BUILT_IN_ALLOCA:
7975 target = expand_builtin_alloca (exp);
7976 if (target)
7977 return target;
7978 break;
7980 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7981 return expand_asan_emit_allocas_unpoison (exp);
7983 case BUILT_IN_STACK_SAVE:
7984 return expand_stack_save ();
7986 case BUILT_IN_STACK_RESTORE:
7987 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7988 return const0_rtx;
7990 case BUILT_IN_BSWAP16:
7991 case BUILT_IN_BSWAP32:
7992 case BUILT_IN_BSWAP64:
7993 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7994 if (target)
7995 return target;
7996 break;
7998 CASE_INT_FN (BUILT_IN_FFS):
7999 target = expand_builtin_unop (target_mode, exp, target,
8000 subtarget, ffs_optab);
8001 if (target)
8002 return target;
8003 break;
8005 CASE_INT_FN (BUILT_IN_CLZ):
8006 target = expand_builtin_unop (target_mode, exp, target,
8007 subtarget, clz_optab);
8008 if (target)
8009 return target;
8010 break;
8012 CASE_INT_FN (BUILT_IN_CTZ):
8013 target = expand_builtin_unop (target_mode, exp, target,
8014 subtarget, ctz_optab);
8015 if (target)
8016 return target;
8017 break;
8019 CASE_INT_FN (BUILT_IN_CLRSB):
8020 target = expand_builtin_unop (target_mode, exp, target,
8021 subtarget, clrsb_optab);
8022 if (target)
8023 return target;
8024 break;
8026 CASE_INT_FN (BUILT_IN_POPCOUNT):
8027 target = expand_builtin_unop (target_mode, exp, target,
8028 subtarget, popcount_optab);
8029 if (target)
8030 return target;
8031 break;
8033 CASE_INT_FN (BUILT_IN_PARITY):
8034 target = expand_builtin_unop (target_mode, exp, target,
8035 subtarget, parity_optab);
8036 if (target)
8037 return target;
8038 break;
8040 case BUILT_IN_STRLEN:
8041 target = expand_builtin_strlen (exp, target, target_mode);
8042 if (target)
8043 return target;
8044 break;
8046 case BUILT_IN_STRNLEN:
8047 target = expand_builtin_strnlen (exp, target, target_mode);
8048 if (target)
8049 return target;
8050 break;
8052 case BUILT_IN_STRCAT:
8053 target = expand_builtin_strcat (exp);
8054 if (target)
8055 return target;
8056 break;
8058 case BUILT_IN_GETTEXT:
8059 case BUILT_IN_PUTS:
8060 case BUILT_IN_PUTS_UNLOCKED:
8061 case BUILT_IN_STRDUP:
8062 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8063 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8064 break;
8066 case BUILT_IN_INDEX:
8067 case BUILT_IN_RINDEX:
8068 case BUILT_IN_STRCHR:
8069 case BUILT_IN_STRRCHR:
8070 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8071 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8072 break;
8074 case BUILT_IN_FPUTS:
8075 case BUILT_IN_FPUTS_UNLOCKED:
8076 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8077 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8078 break;
8080 case BUILT_IN_STRNDUP:
8081 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8082 check_nul_terminated_array (exp,
8083 CALL_EXPR_ARG (exp, 0),
8084 CALL_EXPR_ARG (exp, 1));
8085 break;
8087 case BUILT_IN_STRCASECMP:
8088 case BUILT_IN_STRSTR:
8089 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8091 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8092 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8094 break;
8096 case BUILT_IN_STRCPY:
8097 target = expand_builtin_strcpy (exp, target);
8098 if (target)
8099 return target;
8100 break;
8102 case BUILT_IN_STRNCAT:
8103 target = expand_builtin_strncat (exp, target);
8104 if (target)
8105 return target;
8106 break;
8108 case BUILT_IN_STRNCPY:
8109 target = expand_builtin_strncpy (exp, target);
8110 if (target)
8111 return target;
8112 break;
8114 case BUILT_IN_STPCPY:
8115 target = expand_builtin_stpcpy (exp, target, mode);
8116 if (target)
8117 return target;
8118 break;
8120 case BUILT_IN_STPNCPY:
8121 target = expand_builtin_stpncpy (exp, target);
8122 if (target)
8123 return target;
8124 break;
8126 case BUILT_IN_MEMCHR:
8127 target = expand_builtin_memchr (exp, target);
8128 if (target)
8129 return target;
8130 break;
8132 case BUILT_IN_MEMCPY:
8133 target = expand_builtin_memcpy (exp, target);
8134 if (target)
8135 return target;
8136 break;
8138 case BUILT_IN_MEMMOVE:
8139 target = expand_builtin_memmove (exp, target);
8140 if (target)
8141 return target;
8142 break;
8144 case BUILT_IN_MEMPCPY:
8145 target = expand_builtin_mempcpy (exp, target);
8146 if (target)
8147 return target;
8148 break;
8150 case BUILT_IN_MEMSET:
8151 target = expand_builtin_memset (exp, target, mode);
8152 if (target)
8153 return target;
8154 break;
8156 case BUILT_IN_BZERO:
8157 target = expand_builtin_bzero (exp);
8158 if (target)
8159 return target;
8160 break;
8162 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8163 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8164 when changing it to a strcmp call. */
8165 case BUILT_IN_STRCMP_EQ:
8166 target = expand_builtin_memcmp (exp, target, true);
8167 if (target)
8168 return target;
8170 /* Change this call back to a BUILT_IN_STRCMP. */
8171 TREE_OPERAND (exp, 1)
8172 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8174 /* Delete the last parameter. */
8175 unsigned int i;
8176 vec<tree, va_gc> *arg_vec;
8177 vec_alloc (arg_vec, 2);
8178 for (i = 0; i < 2; i++)
8179 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8180 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8181 /* FALLTHROUGH */
8183 case BUILT_IN_STRCMP:
8184 target = expand_builtin_strcmp (exp, target);
8185 if (target)
8186 return target;
8187 break;
8189 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8190 back to a BUILT_IN_STRNCMP. */
8191 case BUILT_IN_STRNCMP_EQ:
8192 target = expand_builtin_memcmp (exp, target, true);
8193 if (target)
8194 return target;
8196 /* Change it back to a BUILT_IN_STRNCMP. */
8197 TREE_OPERAND (exp, 1)
8198 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8199 /* FALLTHROUGH */
8201 case BUILT_IN_STRNCMP:
8202 target = expand_builtin_strncmp (exp, target, mode);
8203 if (target)
8204 return target;
8205 break;
8207 case BUILT_IN_BCMP:
8208 case BUILT_IN_MEMCMP:
8209 case BUILT_IN_MEMCMP_EQ:
8210 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8211 if (target)
8212 return target;
8213 if (fcode == BUILT_IN_MEMCMP_EQ)
8215 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8216 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8218 break;
8220 case BUILT_IN_SETJMP:
8221 /* This should have been lowered to the builtins below. */
8222 gcc_unreachable ();
8224 case BUILT_IN_SETJMP_SETUP:
8225 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8226 and the receiver label. */
8227 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8229 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8230 VOIDmode, EXPAND_NORMAL);
8231 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8232 rtx_insn *label_r = label_rtx (label);
8234 /* This is copied from the handling of non-local gotos. */
8235 expand_builtin_setjmp_setup (buf_addr, label_r);
8236 nonlocal_goto_handler_labels
8237 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8238 nonlocal_goto_handler_labels);
8239 /* ??? Do not let expand_label treat us as such since we would
8240 not want to be both on the list of non-local labels and on
8241 the list of forced labels. */
8242 FORCED_LABEL (label) = 0;
8243 return const0_rtx;
8245 break;
8247 case BUILT_IN_SETJMP_RECEIVER:
8248 /* __builtin_setjmp_receiver is passed the receiver label. */
8249 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8251 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8252 rtx_insn *label_r = label_rtx (label);
8254 expand_builtin_setjmp_receiver (label_r);
8255 return const0_rtx;
8257 break;
8259 /* __builtin_longjmp is passed a pointer to an array of five words.
8260 It's similar to the C library longjmp function but works with
8261 __builtin_setjmp above. */
8262 case BUILT_IN_LONGJMP:
8263 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8265 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8266 VOIDmode, EXPAND_NORMAL);
8267 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8269 if (value != const1_rtx)
8271 error ("%<__builtin_longjmp%> second argument must be 1");
8272 return const0_rtx;
8275 expand_builtin_longjmp (buf_addr, value);
8276 return const0_rtx;
8278 break;
8280 case BUILT_IN_NONLOCAL_GOTO:
8281 target = expand_builtin_nonlocal_goto (exp);
8282 if (target)
8283 return target;
8284 break;
8286 /* This updates the setjmp buffer that is its argument with the value
8287 of the current stack pointer. */
8288 case BUILT_IN_UPDATE_SETJMP_BUF:
8289 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8291 rtx buf_addr
8292 = expand_normal (CALL_EXPR_ARG (exp, 0));
8294 expand_builtin_update_setjmp_buf (buf_addr);
8295 return const0_rtx;
8297 break;
8299 case BUILT_IN_TRAP:
8300 expand_builtin_trap ();
8301 return const0_rtx;
8303 case BUILT_IN_UNREACHABLE:
8304 expand_builtin_unreachable ();
8305 return const0_rtx;
8307 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8308 case BUILT_IN_SIGNBITD32:
8309 case BUILT_IN_SIGNBITD64:
8310 case BUILT_IN_SIGNBITD128:
8311 target = expand_builtin_signbit (exp, target);
8312 if (target)
8313 return target;
8314 break;
8316 /* Various hooks for the DWARF 2 __throw routine. */
8317 case BUILT_IN_UNWIND_INIT:
8318 expand_builtin_unwind_init ();
8319 return const0_rtx;
8320 case BUILT_IN_DWARF_CFA:
8321 return virtual_cfa_rtx;
8322 #ifdef DWARF2_UNWIND_INFO
8323 case BUILT_IN_DWARF_SP_COLUMN:
8324 return expand_builtin_dwarf_sp_column ();
8325 case BUILT_IN_INIT_DWARF_REG_SIZES:
8326 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8327 return const0_rtx;
8328 #endif
8329 case BUILT_IN_FROB_RETURN_ADDR:
8330 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8331 case BUILT_IN_EXTRACT_RETURN_ADDR:
8332 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8333 case BUILT_IN_EH_RETURN:
8334 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8335 CALL_EXPR_ARG (exp, 1));
8336 return const0_rtx;
8337 case BUILT_IN_EH_RETURN_DATA_REGNO:
8338 return expand_builtin_eh_return_data_regno (exp);
8339 case BUILT_IN_EXTEND_POINTER:
8340 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8341 case BUILT_IN_EH_POINTER:
8342 return expand_builtin_eh_pointer (exp);
8343 case BUILT_IN_EH_FILTER:
8344 return expand_builtin_eh_filter (exp);
8345 case BUILT_IN_EH_COPY_VALUES:
8346 return expand_builtin_eh_copy_values (exp);
8348 case BUILT_IN_VA_START:
8349 return expand_builtin_va_start (exp);
8350 case BUILT_IN_VA_END:
8351 return expand_builtin_va_end (exp);
8352 case BUILT_IN_VA_COPY:
8353 return expand_builtin_va_copy (exp);
8354 case BUILT_IN_EXPECT:
8355 return expand_builtin_expect (exp, target);
8356 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8357 return expand_builtin_expect_with_probability (exp, target);
8358 case BUILT_IN_ASSUME_ALIGNED:
8359 return expand_builtin_assume_aligned (exp, target);
8360 case BUILT_IN_PREFETCH:
8361 expand_builtin_prefetch (exp);
8362 return const0_rtx;
8364 case BUILT_IN_INIT_TRAMPOLINE:
8365 return expand_builtin_init_trampoline (exp, true);
8366 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8367 return expand_builtin_init_trampoline (exp, false);
8368 case BUILT_IN_ADJUST_TRAMPOLINE:
8369 return expand_builtin_adjust_trampoline (exp);
8371 case BUILT_IN_INIT_DESCRIPTOR:
8372 return expand_builtin_init_descriptor (exp);
8373 case BUILT_IN_ADJUST_DESCRIPTOR:
8374 return expand_builtin_adjust_descriptor (exp);
8376 case BUILT_IN_FORK:
8377 case BUILT_IN_EXECL:
8378 case BUILT_IN_EXECV:
8379 case BUILT_IN_EXECLP:
8380 case BUILT_IN_EXECLE:
8381 case BUILT_IN_EXECVP:
8382 case BUILT_IN_EXECVE:
8383 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8384 if (target)
8385 return target;
8386 break;
8388 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8389 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8390 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8391 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8392 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8394 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8395 if (target)
8396 return target;
8397 break;
8399 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8400 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8401 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8402 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8403 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8405 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8406 if (target)
8407 return target;
8408 break;
8410 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8411 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8412 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8413 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8414 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8416 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8417 if (target)
8418 return target;
8419 break;
8421 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8422 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8423 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8424 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8425 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8427 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8428 if (target)
8429 return target;
8430 break;
8432 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8433 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8434 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8435 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8436 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8438 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8439 if (target)
8440 return target;
8441 break;
8443 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8444 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8445 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8446 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8447 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8449 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8450 if (target)
8451 return target;
8452 break;
8454 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8455 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8456 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8457 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8458 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8460 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8461 if (target)
8462 return target;
8463 break;
8465 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8466 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8467 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8468 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8469 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8471 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8472 if (target)
8473 return target;
8474 break;
8476 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8477 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8478 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8479 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8480 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8482 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8483 if (target)
8484 return target;
8485 break;
8487 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8488 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8489 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8490 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8491 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8493 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8494 if (target)
8495 return target;
8496 break;
8498 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8499 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8500 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8501 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8502 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8504 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8505 if (target)
8506 return target;
8507 break;
8509 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8510 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8511 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8512 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8513 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8515 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8516 if (target)
8517 return target;
8518 break;
8520 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8521 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8522 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8523 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8524 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8525 if (mode == VOIDmode)
8526 mode = TYPE_MODE (boolean_type_node);
8527 if (!target || !register_operand (target, mode))
8528 target = gen_reg_rtx (mode);
8530 mode = get_builtin_sync_mode
8531 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8532 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8533 if (target)
8534 return target;
8535 break;
8537 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8538 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8539 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8540 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8541 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8542 mode = get_builtin_sync_mode
8543 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8544 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8545 if (target)
8546 return target;
8547 break;
8549 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8550 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8551 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8552 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8553 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8555 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8556 if (target)
8557 return target;
8558 break;
8560 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8561 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8562 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8563 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8564 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8566 expand_builtin_sync_lock_release (mode, exp);
8567 return const0_rtx;
8569 case BUILT_IN_SYNC_SYNCHRONIZE:
8570 expand_builtin_sync_synchronize ();
8571 return const0_rtx;
8573 case BUILT_IN_ATOMIC_EXCHANGE_1:
8574 case BUILT_IN_ATOMIC_EXCHANGE_2:
8575 case BUILT_IN_ATOMIC_EXCHANGE_4:
8576 case BUILT_IN_ATOMIC_EXCHANGE_8:
8577 case BUILT_IN_ATOMIC_EXCHANGE_16:
8578 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8579 target = expand_builtin_atomic_exchange (mode, exp, target);
8580 if (target)
8581 return target;
8582 break;
8584 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8585 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8586 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8587 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8588 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8590 unsigned int nargs, z;
8591 vec<tree, va_gc> *vec;
8593 mode =
8594 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8595 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8596 if (target)
8597 return target;
8599 /* If this is turned into an external library call, the weak parameter
8600 must be dropped to match the expected parameter list. */
8601 nargs = call_expr_nargs (exp);
8602 vec_alloc (vec, nargs - 1);
8603 for (z = 0; z < 3; z++)
8604 vec->quick_push (CALL_EXPR_ARG (exp, z));
8605 /* Skip the boolean weak parameter. */
8606 for (z = 4; z < 6; z++)
8607 vec->quick_push (CALL_EXPR_ARG (exp, z));
8608 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8609 break;
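/* Library-call sketch (illustrative): the builtin takes six arguments,

     __atomic_compare_exchange_n (ptr, &expected, desired, weak,
                                  success_mm, failure_mm)

   while the libatomic entry points have no WEAK parameter, roughly

     bool __atomic_compare_exchange_N (T *ptr, T *expected, T desired,
                                       int success_mm, int failure_mm);

   hence the rebuilt call above pushing arguments 0-2 and 4-5 only.  */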
8612 case BUILT_IN_ATOMIC_LOAD_1:
8613 case BUILT_IN_ATOMIC_LOAD_2:
8614 case BUILT_IN_ATOMIC_LOAD_4:
8615 case BUILT_IN_ATOMIC_LOAD_8:
8616 case BUILT_IN_ATOMIC_LOAD_16:
8617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8618 target = expand_builtin_atomic_load (mode, exp, target);
8619 if (target)
8620 return target;
8621 break;
8623 case BUILT_IN_ATOMIC_STORE_1:
8624 case BUILT_IN_ATOMIC_STORE_2:
8625 case BUILT_IN_ATOMIC_STORE_4:
8626 case BUILT_IN_ATOMIC_STORE_8:
8627 case BUILT_IN_ATOMIC_STORE_16:
8628 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8629 target = expand_builtin_atomic_store (mode, exp);
8630 if (target)
8631 return const0_rtx;
8632 break;
8634 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8635 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8636 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8637 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8638 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8640 enum built_in_function lib;
8641 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8642 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8643 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8644 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8645 ignore, lib);
8646 if (target)
8647 return target;
8648 break;
8650 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8651 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8652 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8653 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8654 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8656 enum built_in_function lib;
8657 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8658 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8659 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8660 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8661 ignore, lib);
8662 if (target)
8663 return target;
8664 break;
8666 case BUILT_IN_ATOMIC_AND_FETCH_1:
8667 case BUILT_IN_ATOMIC_AND_FETCH_2:
8668 case BUILT_IN_ATOMIC_AND_FETCH_4:
8669 case BUILT_IN_ATOMIC_AND_FETCH_8:
8670 case BUILT_IN_ATOMIC_AND_FETCH_16:
8672 enum built_in_function lib;
8673 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8674 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8675 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8676 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8677 ignore, lib);
8678 if (target)
8679 return target;
8680 break;
8682 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8683 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8684 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8685 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8686 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8688 enum built_in_function lib;
8689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8690 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8691 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8692 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8693 ignore, lib);
8694 if (target)
8695 return target;
8696 break;
8698 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8699 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8700 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8701 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8702 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8704 enum built_in_function lib;
8705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8706 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8707 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8708 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8709 ignore, lib);
8710 if (target)
8711 return target;
8712 break;
8714 case BUILT_IN_ATOMIC_OR_FETCH_1:
8715 case BUILT_IN_ATOMIC_OR_FETCH_2:
8716 case BUILT_IN_ATOMIC_OR_FETCH_4:
8717 case BUILT_IN_ATOMIC_OR_FETCH_8:
8718 case BUILT_IN_ATOMIC_OR_FETCH_16:
8720 enum built_in_function lib;
8721 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8722 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8723 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8724 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8725 ignore, lib);
8726 if (target)
8727 return target;
8728 break;
8730 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8731 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8732 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8733 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8734 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8736 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8737 ignore, BUILT_IN_NONE);
8738 if (target)
8739 return target;
8740 break;
8742 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8743 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8744 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8745 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8746 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8748 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8749 ignore, BUILT_IN_NONE);
8750 if (target)
8751 return target;
8752 break;
8754 case BUILT_IN_ATOMIC_FETCH_AND_1:
8755 case BUILT_IN_ATOMIC_FETCH_AND_2:
8756 case BUILT_IN_ATOMIC_FETCH_AND_4:
8757 case BUILT_IN_ATOMIC_FETCH_AND_8:
8758 case BUILT_IN_ATOMIC_FETCH_AND_16:
8759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8760 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8761 ignore, BUILT_IN_NONE);
8762 if (target)
8763 return target;
8764 break;
8766 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8767 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8768 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8769 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8770 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8771 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8772 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8773 ignore, BUILT_IN_NONE);
8774 if (target)
8775 return target;
8776 break;
8778 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8779 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8780 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8781 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8782 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8784 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8785 ignore, BUILT_IN_NONE);
8786 if (target)
8787 return target;
8788 break;
8790 case BUILT_IN_ATOMIC_FETCH_OR_1:
8791 case BUILT_IN_ATOMIC_FETCH_OR_2:
8792 case BUILT_IN_ATOMIC_FETCH_OR_4:
8793 case BUILT_IN_ATOMIC_FETCH_OR_8:
8794 case BUILT_IN_ATOMIC_FETCH_OR_16:
8795 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8796 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8797 ignore, BUILT_IN_NONE);
8798 if (target)
8799 return target;
8800 break;
8802 case BUILT_IN_ATOMIC_TEST_AND_SET:
8803 return expand_builtin_atomic_test_and_set (exp, target);
8805 case BUILT_IN_ATOMIC_CLEAR:
8806 return expand_builtin_atomic_clear (exp);
8808 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8809 return expand_builtin_atomic_always_lock_free (exp);
8811 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8812 target = expand_builtin_atomic_is_lock_free (exp);
8813 if (target)
8814 return target;
8815 break;
8817 case BUILT_IN_ATOMIC_THREAD_FENCE:
8818 expand_builtin_atomic_thread_fence (exp);
8819 return const0_rtx;
8821 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8822 expand_builtin_atomic_signal_fence (exp);
8823 return const0_rtx;
8825 case BUILT_IN_OBJECT_SIZE:
8826 return expand_builtin_object_size (exp);
8828 case BUILT_IN_MEMCPY_CHK:
8829 case BUILT_IN_MEMPCPY_CHK:
8830 case BUILT_IN_MEMMOVE_CHK:
8831 case BUILT_IN_MEMSET_CHK:
8832 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8833 if (target)
8834 return target;
8835 break;
8837 case BUILT_IN_STRCPY_CHK:
8838 case BUILT_IN_STPCPY_CHK:
8839 case BUILT_IN_STRNCPY_CHK:
8840 case BUILT_IN_STPNCPY_CHK:
8841 case BUILT_IN_STRCAT_CHK:
8842 case BUILT_IN_STRNCAT_CHK:
8843 case BUILT_IN_SNPRINTF_CHK:
8844 case BUILT_IN_VSNPRINTF_CHK:
8845 maybe_emit_chk_warning (exp, fcode);
8846 break;
8848 case BUILT_IN_SPRINTF_CHK:
8849 case BUILT_IN_VSPRINTF_CHK:
8850 maybe_emit_sprintf_chk_warning (exp, fcode);
8851 break;
8853 case BUILT_IN_FREE:
8854 if (warn_free_nonheap_object)
8855 maybe_emit_free_warning (exp);
8856 break;
8858 case BUILT_IN_THREAD_POINTER:
8859 return expand_builtin_thread_pointer (exp, target);
8861 case BUILT_IN_SET_THREAD_POINTER:
8862 expand_builtin_set_thread_pointer (exp);
8863 return const0_rtx;
8865 case BUILT_IN_ACC_ON_DEVICE:
8866 /* Do a library call, if we failed to expand the builtin when
8867 folding. */
8868 break;
8870 case BUILT_IN_GOACC_PARLEVEL_ID:
8871 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8872 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8874 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8875 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8877 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8878 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8879 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8880 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8881 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8882 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8883 return expand_speculation_safe_value (mode, exp, target, ignore);
8885 default: /* Just do a library call for an unknown builtin. */
8886 break;
8889 /* The switch statement above can drop through to cause the function
8890 to be called normally. */
8891 return expand_call (exp, target, ignore);
8894 /* Determine whether a tree node represents a call to a built-in
8895 function. If the tree T is a call to a built-in function with
8896 the right number of arguments of the appropriate types, return
8897 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8898 Otherwise the return value is END_BUILTINS. */
8900 enum built_in_function
8901 builtin_mathfn_code (const_tree t)
8903 const_tree fndecl, arg, parmlist;
8904 const_tree argtype, parmtype;
8905 const_call_expr_arg_iterator iter;
8907 if (TREE_CODE (t) != CALL_EXPR)
8908 return END_BUILTINS;
8910 fndecl = get_callee_fndecl (t);
8911 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8912 return END_BUILTINS;
8914 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8915 init_const_call_expr_arg_iterator (t, &iter);
8916 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8918 /* If a function doesn't take a variable number of arguments,
8919 the last element in the list will have type `void'. */
8920 parmtype = TREE_VALUE (parmlist);
8921 if (VOID_TYPE_P (parmtype))
8923 if (more_const_call_expr_args_p (&iter))
8924 return END_BUILTINS;
8925 return DECL_FUNCTION_CODE (fndecl);
8928 if (! more_const_call_expr_args_p (&iter))
8929 return END_BUILTINS;
8931 arg = next_const_call_expr_arg (&iter);
8932 argtype = TREE_TYPE (arg);
8934 if (SCALAR_FLOAT_TYPE_P (parmtype))
8936 if (! SCALAR_FLOAT_TYPE_P (argtype))
8937 return END_BUILTINS;
8939 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8941 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8942 return END_BUILTINS;
8944 else if (POINTER_TYPE_P (parmtype))
8946 if (! POINTER_TYPE_P (argtype))
8947 return END_BUILTINS;
8949 else if (INTEGRAL_TYPE_P (parmtype))
8951 if (! INTEGRAL_TYPE_P (argtype))
8952 return END_BUILTINS;
8954 else
8955 return END_BUILTINS;
8958 /* Variable-length argument list. */
8959 return DECL_FUNCTION_CODE (fndecl);
8962 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8963 evaluate to a constant. */
8965 static tree
8966 fold_builtin_constant_p (tree arg)
8968 /* We return 1 for a numeric type that's known to be a constant
8969 value at compile-time or for an aggregate type that's a
8970 literal constant. */
8971 STRIP_NOPS (arg);
8973 /* If we know this is a constant, return the constant one. */
8974 if (CONSTANT_CLASS_P (arg)
8975 || (TREE_CODE (arg) == CONSTRUCTOR
8976 && TREE_CONSTANT (arg)))
8977 return integer_one_node;
8978 if (TREE_CODE (arg) == ADDR_EXPR)
8980 tree op = TREE_OPERAND (arg, 0);
8981 if (TREE_CODE (op) == STRING_CST
8982 || (TREE_CODE (op) == ARRAY_REF
8983 && integer_zerop (TREE_OPERAND (op, 1))
8984 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8985 return integer_one_node;
8988 /* If this expression has side effects, show we don't know it to be a
8989 constant. Likewise if it's a pointer or aggregate type since in
8990 those cases we only want literals, as those are only optimized
8991 when generating RTL, not later.
8992 And finally, if we are compiling an initializer, not code, we
8993 need to return a definite result now; there's not going to be any
8994 more optimization done. */
8995 if (TREE_SIDE_EFFECTS (arg)
8996 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8997 || POINTER_TYPE_P (TREE_TYPE (arg))
8998 || cfun == 0
8999 || folding_initializer
9000 || force_folding_builtin_constant_p)
9001 return integer_zero_node;
9003 return NULL_TREE;
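/* An illustrative sketch of the outcomes above, at the source level
   (X and S are hypothetical operands):

     __builtin_constant_p (42)     => 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  => 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (s)      => 0   for an aggregate S
     __builtin_constant_p (x)      => NULL_TREE, i.e. deferred in the
                                      hope a later pass proves X constant.  */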
9006 /* Create builtin_expect or builtin_expect_with_probability
9007 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9008 The Fortran FE can also produce builtin_expect with PREDICTOR as a third argument.
9009 builtin_expect_with_probability instead uses the third argument as the PROBABILITY
9010 value. */
9012 static tree
9013 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9014 tree predictor, tree probability)
9016 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9018 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9019 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9020 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9021 ret_type = TREE_TYPE (TREE_TYPE (fn));
9022 pred_type = TREE_VALUE (arg_types);
9023 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9025 pred = fold_convert_loc (loc, pred_type, pred);
9026 expected = fold_convert_loc (loc, expected_type, expected);
9028 if (probability)
9029 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9030 else
9031 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9032 predictor);
9034 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9035 build_int_cst (ret_type, 0));
9038 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9039 NULL_TREE if no simplification is possible. */
9041 tree
9042 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9043 tree arg3)
9045 tree inner, fndecl, inner_arg0;
9046 enum tree_code code;
9048 /* Distribute the expected value over short-circuiting operators.
9049 See through the cast from truthvalue_type_node to long. */
9050 inner_arg0 = arg0;
9051 while (CONVERT_EXPR_P (inner_arg0)
9052 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9053 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9054 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9056 /* If this is a builtin_expect within a builtin_expect keep the
9057 inner one. See through a comparison against a constant. It
9058 might have been added to create a truthvalue. */
9059 inner = inner_arg0;
9061 if (COMPARISON_CLASS_P (inner)
9062 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9063 inner = TREE_OPERAND (inner, 0);
9065 if (TREE_CODE (inner) == CALL_EXPR
9066 && (fndecl = get_callee_fndecl (inner))
9067 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9068 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9069 return arg0;
9071 inner = inner_arg0;
9072 code = TREE_CODE (inner);
9073 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9075 tree op0 = TREE_OPERAND (inner, 0);
9076 tree op1 = TREE_OPERAND (inner, 1);
9077 arg1 = save_expr (arg1);
9079 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9080 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9081 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9083 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9086 /* If the argument isn't invariant then there's nothing else we can do. */
9087 if (!TREE_CONSTANT (inner_arg0))
9088 return NULL_TREE;
9090 /* If we expect that a comparison against the argument will fold to
9091 a constant return the constant. In practice, this means a true
9092 constant or the address of a non-weak symbol. */
9093 inner = inner_arg0;
9094 STRIP_NOPS (inner);
9095 if (TREE_CODE (inner) == ADDR_EXPR)
9099 inner = TREE_OPERAND (inner, 0);
9101 while (TREE_CODE (inner) == COMPONENT_REF
9102 || TREE_CODE (inner) == ARRAY_REF);
9103 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9104 return NULL_TREE;
9107 /* Otherwise, ARG0 already has the proper type for the return value. */
9108 return arg0;
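/* A sketch of the short-circuit distribution above, written at the
   source level (A and B are hypothetical conditions):

     __builtin_expect (a && b, 1)
       => __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so each operand carries the expectation separately, while a nested
   __builtin_expect (__builtin_expect (x, 1), 1) keeps only the inner
   call.  */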
9111 /* Fold a call to __builtin_classify_type with argument ARG. */
9113 static tree
9114 fold_builtin_classify_type (tree arg)
9116 if (arg == 0)
9117 return build_int_cst (integer_type_node, no_type_class);
9119 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9122 /* Fold a call to __builtin_strlen with argument ARG. */
9124 static tree
9125 fold_builtin_strlen (location_t loc, tree type, tree arg)
9127 if (!validate_arg (arg, POINTER_TYPE))
9128 return NULL_TREE;
9129 else
9131 c_strlen_data lendata = { };
9132 tree len = c_strlen (arg, 0, &lendata);
9134 if (len)
9135 return fold_convert_loc (loc, type, len);
9137 if (!lendata.decl)
9138 c_strlen (arg, 1, &lendata);
9140 if (lendata.decl)
9142 if (EXPR_HAS_LOCATION (arg))
9143 loc = EXPR_LOCATION (arg);
9144 else if (loc == UNKNOWN_LOCATION)
9145 loc = input_location;
9146 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9149 return NULL_TREE;
9153 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9155 static tree
9156 fold_builtin_inf (location_t loc, tree type, int warn)
9158 REAL_VALUE_TYPE real;
9160 /* __builtin_inff is intended to be usable to define INFINITY on all
9161 targets. If an infinity is not available, INFINITY expands "to a
9162 positive constant of type float that overflows at translation
9163 time", footnote "In this case, using INFINITY will violate the
9164 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9165 Thus we pedwarn to ensure this constraint violation is
9166 diagnosed. */
9167 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9168 pedwarn (loc, 0, "target format does not support infinity");
9170 real_inf (&real);
9171 return build_real (type, real);
9174 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9175 NULL_TREE if no simplification can be made. */
9177 static tree
9178 fold_builtin_sincos (location_t loc,
9179 tree arg0, tree arg1, tree arg2)
9181 tree type;
9182 tree fndecl, call = NULL_TREE;
9184 if (!validate_arg (arg0, REAL_TYPE)
9185 || !validate_arg (arg1, POINTER_TYPE)
9186 || !validate_arg (arg2, POINTER_TYPE))
9187 return NULL_TREE;
9189 type = TREE_TYPE (arg0);
9191 /* Calculate the result when the argument is a constant. */
9192 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9193 if (fn == END_BUILTINS)
9194 return NULL_TREE;
9196 /* Canonicalize sincos to cexpi. */
9197 if (TREE_CODE (arg0) == REAL_CST)
9199 tree complex_type = build_complex_type (type);
9200 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9202 if (!call)
9204 if (!targetm.libc_has_function (function_c99_math_complex)
9205 || !builtin_decl_implicit_p (fn))
9206 return NULL_TREE;
9207 fndecl = builtin_decl_explicit (fn);
9208 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9209 call = builtin_save_expr (call);
9212 tree ptype = build_pointer_type (type);
9213 arg1 = fold_convert (ptype, arg1);
9214 arg2 = fold_convert (ptype, arg2);
9215 return build2 (COMPOUND_EXPR, void_type_node,
9216 build2 (MODIFY_EXPR, void_type_node,
9217 build_fold_indirect_ref_loc (loc, arg1),
9218 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9219 build2 (MODIFY_EXPR, void_type_node,
9220 build_fold_indirect_ref_loc (loc, arg2),
9221 fold_build1_loc (loc, REALPART_EXPR, type, call)));
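/* Roughly, the canonicalization above turns

     sincos (x, &s, &c);

   into the equivalent of

     tmp = cexpi (x);  s = __imag__ tmp;  c = __real__ tmp;

   since cexpi (x) computes cos (x) + i*sin (x).  (TMP, S and C are
   hypothetical names used only for exposition.)  */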
9224 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9225 Return NULL_TREE if no simplification can be made. */
9227 static tree
9228 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9230 if (!validate_arg (arg1, POINTER_TYPE)
9231 || !validate_arg (arg2, POINTER_TYPE)
9232 || !validate_arg (len, INTEGER_TYPE))
9233 return NULL_TREE;
9235 /* If the LEN parameter is zero, return zero. */
9236 if (integer_zerop (len))
9237 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9238 arg1, arg2);
9240 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9241 if (operand_equal_p (arg1, arg2, 0))
9242 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9244 /* If the LEN parameter is one, return an expression corresponding to
9245 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9246 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9248 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9249 tree cst_uchar_ptr_node
9250 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9252 tree ind1
9253 = fold_convert_loc (loc, integer_type_node,
9254 build1 (INDIRECT_REF, cst_uchar_node,
9255 fold_convert_loc (loc,
9256 cst_uchar_ptr_node,
9257 arg1)));
9258 tree ind2
9259 = fold_convert_loc (loc, integer_type_node,
9260 build1 (INDIRECT_REF, cst_uchar_node,
9261 fold_convert_loc (loc,
9262 cst_uchar_ptr_node,
9263 arg2)));
9264 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9267 return NULL_TREE;
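/* For example, the len == 1 case above folds

     memcmp (p, q, 1)

   to the byte difference

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   while memcmp (p, q, 0) and memcmp (p, p, n) fold to 0 with the
   remaining arguments still evaluated for their side effects.  */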
9270 /* Fold a call to builtin isascii with argument ARG. */
9272 static tree
9273 fold_builtin_isascii (location_t loc, tree arg)
9275 if (!validate_arg (arg, INTEGER_TYPE))
9276 return NULL_TREE;
9277 else
9279 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9280 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9281 build_int_cst (integer_type_node,
9282 ~ (unsigned HOST_WIDE_INT) 0x7f));
9283 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9284 arg, integer_zero_node);
9288 /* Fold a call to builtin toascii with argument ARG. */
9290 static tree
9291 fold_builtin_toascii (location_t loc, tree arg)
9293 if (!validate_arg (arg, INTEGER_TYPE))
9294 return NULL_TREE;
9296 /* Transform toascii(c) -> (c & 0x7f). */
9297 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9298 build_int_cst (integer_type_node, 0x7f));
9301 /* Fold a call to builtin isdigit with argument ARG. */
9303 static tree
9304 fold_builtin_isdigit (location_t loc, tree arg)
9306 if (!validate_arg (arg, INTEGER_TYPE))
9307 return NULL_TREE;
9308 else
9310 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9311 /* According to the C standard, isdigit is unaffected by locale.
9312 However, it definitely is affected by the target character set. */
9313 unsigned HOST_WIDE_INT target_digit0
9314 = lang_hooks.to_target_charset ('0');
9316 if (target_digit0 == 0)
9317 return NULL_TREE;
9319 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9320 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9321 build_int_cst (unsigned_type_node, target_digit0));
9322 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9323 build_int_cst (unsigned_type_node, 9));
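/* A worked example of the rewrite above: with an ASCII target charset
   '0' is 48, so isdigit (c) becomes (unsigned) c - 48 <= 9.  For
   c == '7' (55) that is 7 <= 9, nonzero; for c == 'a' (97) it is
   49 <= 9, zero.  The unsigned subtraction lets a single comparison
   cover both ends of the range.  */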
9327 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9329 static tree
9330 fold_builtin_fabs (location_t loc, tree arg, tree type)
9332 if (!validate_arg (arg, REAL_TYPE))
9333 return NULL_TREE;
9335 arg = fold_convert_loc (loc, type, arg);
9336 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9339 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9341 static tree
9342 fold_builtin_abs (location_t loc, tree arg, tree type)
9344 if (!validate_arg (arg, INTEGER_TYPE))
9345 return NULL_TREE;
9347 arg = fold_convert_loc (loc, type, arg);
9348 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9351 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9353 static tree
9354 fold_builtin_carg (location_t loc, tree arg, tree type)
9356 if (validate_arg (arg, COMPLEX_TYPE)
9357 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9359 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9361 if (atan2_fn)
9363 tree new_arg = builtin_save_expr (arg);
9364 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9365 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9366 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9370 return NULL_TREE;
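/* E.g. the rewrite above yields carg (z) => atan2 (__imag__ z,
   __real__ z), so carg (-1.0 + 0.0i) folds the same way that
   atan2 (0.0, -1.0) does, giving pi.  */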
9373 /* Fold a call to builtin frexp, we can assume the base is 2. */
9375 static tree
9376 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9378 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9379 return NULL_TREE;
9381 STRIP_NOPS (arg0);
9383 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9384 return NULL_TREE;
9386 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9388 /* Proceed if a valid pointer type was passed in. */
9389 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9391 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9392 tree frac, exp;
9394 switch (value->cl)
9396 case rvc_zero:
9397 /* For +-0, return (*exp = 0, +-0). */
9398 exp = integer_zero_node;
9399 frac = arg0;
9400 break;
9401 case rvc_nan:
9402 case rvc_inf:
9403 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9404 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9405 case rvc_normal:
9407 /* Since the frexp function always expects base 2, and in
9408 GCC normalized significands are already in the range
9409 [0.5, 1.0), we have exactly what frexp wants. */
9410 REAL_VALUE_TYPE frac_rvt = *value;
9411 SET_REAL_EXP (&frac_rvt, 0);
9412 frac = build_real (rettype, frac_rvt);
9413 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9415 break;
9416 default:
9417 gcc_unreachable ();
9420 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9421 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9422 TREE_SIDE_EFFECTS (arg1) = 1;
9423 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9426 return NULL_TREE;
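/* A worked example of the constant folding above: 12.0 has significand
   0.75 in [0.5, 1.0) and exponent 4, so

     frexp (12.0, &e)   =>   (*e = 4, 0.75)

   matching 0.75 * 2**4 == 12.0.  (E is a hypothetical int variable.)  */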
9429 /* Fold a call to builtin modf. */
9431 static tree
9432 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9434 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9435 return NULL_TREE;
9437 STRIP_NOPS (arg0);
9439 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9440 return NULL_TREE;
9442 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9444 /* Proceed if a valid pointer type was passed in. */
9445 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9447 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9448 REAL_VALUE_TYPE trunc, frac;
9450 switch (value->cl)
9452 case rvc_nan:
9453 case rvc_zero:
9454 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9455 trunc = frac = *value;
9456 break;
9457 case rvc_inf:
9458 /* For +-Inf, return (*arg1 = arg0, +-0). */
9459 frac = dconst0;
9460 frac.sign = value->sign;
9461 trunc = *value;
9462 break;
9463 case rvc_normal:
9464 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9465 real_trunc (&trunc, VOIDmode, value);
9466 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9467 /* If the original number was negative and already
9468 integral, then the fractional part is -0.0. */
9469 if (value->sign && frac.cl == rvc_zero)
9470 frac.sign = value->sign;
9471 break;
9474 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9475 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9476 build_real (rettype, trunc));
9477 TREE_SIDE_EFFECTS (arg1) = 1;
9478 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9479 build_real (rettype, frac));
9482 return NULL_TREE;
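/* For instance, the rvc_normal case above gives

     modf (3.25, &iptr)   =>  (*iptr = 3.0, 0.25)
     modf (-2.0, &iptr)   =>  (*iptr = -2.0, -0.0)

   where the second line shows the negative-and-already-integral rule
   producing a fractional part of -0.0.  (IPTR is a hypothetical
   pointer argument.)  */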
9485 /* Given a location LOC, an interclass builtin function decl FNDECL
9486 and its single argument ARG, return a folded expression computing
9487 the same, or NULL_TREE if we either couldn't or didn't want to fold
9488 (the latter happens if there's an RTL instruction available). */
9490 static tree
9491 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9493 machine_mode mode;
9495 if (!validate_arg (arg, REAL_TYPE))
9496 return NULL_TREE;
9498 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9499 return NULL_TREE;
9501 mode = TYPE_MODE (TREE_TYPE (arg));
9503 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9505 /* If there is no optab, try generic code. */
9506 switch (DECL_FUNCTION_CODE (fndecl))
9508 tree result;
9510 CASE_FLT_FN (BUILT_IN_ISINF):
9512 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9513 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9514 tree type = TREE_TYPE (arg);
9515 REAL_VALUE_TYPE r;
9516 char buf[128];
9518 if (is_ibm_extended)
9520 /* NaN and Inf are encoded in the high-order double value
9521 only. The low-order value is not significant. */
9522 type = double_type_node;
9523 mode = DFmode;
9524 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9526 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9527 real_from_string (&r, buf);
9528 result = build_call_expr (isgr_fn, 2,
9529 fold_build1_loc (loc, ABS_EXPR, type, arg),
9530 build_real (type, r));
9531 return result;
9533 CASE_FLT_FN (BUILT_IN_FINITE):
9534 case BUILT_IN_ISFINITE:
9536 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9537 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9538 tree type = TREE_TYPE (arg);
9539 REAL_VALUE_TYPE r;
9540 char buf[128];
9542 if (is_ibm_extended)
9544 /* NaN and Inf are encoded in the high-order double value
9545 only. The low-order value is not significant. */
9546 type = double_type_node;
9547 mode = DFmode;
9548 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9550 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9551 real_from_string (&r, buf);
9552 result = build_call_expr (isle_fn, 2,
9553 fold_build1_loc (loc, ABS_EXPR, type, arg),
9554 build_real (type, r));
9555 /*result = fold_build2_loc (loc, UNGT_EXPR,
9556 TREE_TYPE (TREE_TYPE (fndecl)),
9557 fold_build1_loc (loc, ABS_EXPR, type, arg),
9558 build_real (type, r));
9559 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9560 TREE_TYPE (TREE_TYPE (fndecl)),
9561 result);*/
9562 return result;
9564 case BUILT_IN_ISNORMAL:
9566 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9567 islessequal(fabs(x),DBL_MAX). */
9568 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9569 tree type = TREE_TYPE (arg);
9570 tree orig_arg, max_exp, min_exp;
9571 machine_mode orig_mode = mode;
9572 REAL_VALUE_TYPE rmax, rmin;
9573 char buf[128];
9575 orig_arg = arg = builtin_save_expr (arg);
9576 if (is_ibm_extended)
9578 /* Use double to test the normal range of IBM extended
9579 precision. Emin for IBM extended precision is
9580 different from emin for IEEE double, being 53 higher
9581 since the low double exponent is at least 53 lower
9582 than the high double exponent. */
9583 type = double_type_node;
9584 mode = DFmode;
9585 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9587 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9589 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9590 real_from_string (&rmax, buf);
9591 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9592 real_from_string (&rmin, buf);
9593 max_exp = build_real (type, rmax);
9594 min_exp = build_real (type, rmin);
9596 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9597 if (is_ibm_extended)
9599 /* Testing the high end of the range is done just using
9600 the high double, using the same test as isfinite().
9601 For the subnormal end of the range we first test the
9602 high double, then if its magnitude is equal to the
9603 limit of 0x1p-969, we test whether the low double is
9604 non-zero and opposite sign to the high double. */
9605 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9606 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9607 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9608 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9609 arg, min_exp);
9610 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9611 complex_double_type_node, orig_arg);
9612 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9613 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9614 tree zero = build_real (type, dconst0);
9615 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9616 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9617 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9618 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9619 fold_build3 (COND_EXPR,
9620 integer_type_node,
9621 hilt, logt, lolt));
9622 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9623 eq_min, ok_lo);
9624 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9625 gt_min, eq_min);
9627 else
9629 tree const isge_fn
9630 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9631 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9633 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9634 max_exp, min_exp);
9635 return result;
9637 default:
9638 break;
9641 return NULL_TREE;
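/* For IEEE double the generic rewrites above amount to

     isinf (x)     =>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  =>  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  =>  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), DBL_MIN)

   with the DBL_MAX and DBL_MIN constants materialized from the mode via
   get_max_float and the "0x1p%d" string built from emin - 1.  */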
9644 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9645 ARG is the argument for the call. */
9647 static tree
9648 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9650 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652 if (!validate_arg (arg, REAL_TYPE))
9653 return NULL_TREE;
9655 switch (builtin_index)
9657 case BUILT_IN_ISINF:
9658 if (!HONOR_INFINITIES (arg))
9659 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9661 return NULL_TREE;
9663 case BUILT_IN_ISINF_SIGN:
9665 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9666 /* In a boolean context, GCC will fold the inner COND_EXPR to
9667 1. So e.g. "if (isinf_sign(x))" would be folded to just
9668 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9669 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9670 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9671 tree tmp = NULL_TREE;
9673 arg = builtin_save_expr (arg);
9675 if (signbit_fn && isinf_fn)
9677 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9678 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9680 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9681 signbit_call, integer_zero_node);
9682 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9683 isinf_call, integer_zero_node);
9685 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9686 integer_minus_one_node, integer_one_node);
9687 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9688 isinf_call, tmp,
9689 integer_zero_node);
9692 return tmp;
9695 case BUILT_IN_ISFINITE:
9696 if (!HONOR_NANS (arg)
9697 && !HONOR_INFINITIES (arg))
9698 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9700 return NULL_TREE;
9702 case BUILT_IN_ISNAN:
9703 if (!HONOR_NANS (arg))
9704 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9707 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9708 if (is_ibm_extended)
9710 /* NaN and Inf are encoded in the high-order double value
9711 only. The low-order value is not significant. */
9712 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9715 arg = builtin_save_expr (arg);
9716 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9718 default:
9719 gcc_unreachable ();
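/* A sketch of the BUILT_IN_ISNAN rewrite above: the saved argument is
   compared unordered with itself,

     isnan (x)   =>  __builtin_isunordered (x, x)

   which is nonzero exactly when X is a NaN, and the whole call folds
   to 0 up front when the mode honors no NaNs.  */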
9723 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9724 This builtin will generate code to return the appropriate floating
9725 point classification depending on the value of the floating point
9726 number passed in. The possible return values must be supplied as
9727 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9728 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9729 one floating point argument which is "type generic". */
9731 static tree
9732 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9734 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9735 arg, type, res, tmp;
9736 machine_mode mode;
9737 REAL_VALUE_TYPE r;
9738 char buf[128];
9740 /* Verify the required arguments in the original call. */
9741 if (nargs != 6
9742 || !validate_arg (args[0], INTEGER_TYPE)
9743 || !validate_arg (args[1], INTEGER_TYPE)
9744 || !validate_arg (args[2], INTEGER_TYPE)
9745 || !validate_arg (args[3], INTEGER_TYPE)
9746 || !validate_arg (args[4], INTEGER_TYPE)
9747 || !validate_arg (args[5], REAL_TYPE))
9748 return NULL_TREE;
9750 fp_nan = args[0];
9751 fp_infinite = args[1];
9752 fp_normal = args[2];
9753 fp_subnormal = args[3];
9754 fp_zero = args[4];
9755 arg = args[5];
9756 type = TREE_TYPE (arg);
9757 mode = TYPE_MODE (type);
9758 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9760 /* fpclassify(x) ->
9761 isnan(x) ? FP_NAN :
9762 (fabs(x) == Inf ? FP_INFINITE :
9763 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9764 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9766 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9767 build_real (type, dconst0));
9768 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9769 tmp, fp_zero, fp_subnormal);
9771 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9772 real_from_string (&r, buf);
9773 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9774 arg, build_real (type, r));
9775 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9777 if (HONOR_INFINITIES (mode))
9779 real_inf (&r);
9780 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9781 build_real (type, r));
9782 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9783 fp_infinite, res);
9786 if (HONOR_NANS (mode))
9788 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9789 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9792 return res;
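/* For example, on an IEEE double target emin is -1021, so the chain
   above tests fabs (x) against 0.0, 0x1p-1022 (the smallest normal,
   DBL_MIN) and +Inf: x == 1.5 selects FP_NORMAL, while a denormal such
   as 0x1p-1030 falls through to FP_SUBNORMAL.  */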
9795 /* Fold a call to an unordered comparison function such as
9796 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9797 being called and ARG0 and ARG1 are the arguments for the call.
9798 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9799 the opposite of the desired result. UNORDERED_CODE is used
9800 for modes that can hold NaNs and ORDERED_CODE is used for
9801 the rest. */
9803 static tree
9804 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9805 enum tree_code unordered_code,
9806 enum tree_code ordered_code)
9808 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9809 enum tree_code code;
9810 tree type0, type1;
9811 enum tree_code code0, code1;
9812 tree cmp_type = NULL_TREE;
9814 type0 = TREE_TYPE (arg0);
9815 type1 = TREE_TYPE (arg1);
9817 code0 = TREE_CODE (type0);
9818 code1 = TREE_CODE (type1);
9820 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9821 /* Choose the wider of two real types. */
9822 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9823 ? type0 : type1;
9824 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9825 cmp_type = type0;
9826 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9827 cmp_type = type1;
9829 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9830 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9832 if (unordered_code == UNORDERED_EXPR)
9834 if (!HONOR_NANS (arg0))
9835 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9836 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9839 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9840 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9841 fold_build2_loc (loc, code, type, arg0, arg1));
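/* A sketch of the inversion above: each macro folds to the logical
   negation of the opposite comparison code, e.g.

     isgreater (x, y)  =>  TRUTH_NOT_EXPR (UNLE_EXPR (x, y))

   when NaNs are honored, and !(x <= y) otherwise, so the comparison
   stays quiet on a NaN operand.  */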
9844 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9845 arithmetics if it can never overflow, or into internal functions that
9846 return both result of arithmetics and overflowed boolean flag in
9847 a complex integer result, or some other check for overflow.
9848 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9849 checking part of that. */
9851 static tree
9852 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9853 tree arg0, tree arg1, tree arg2)
9855 enum internal_fn ifn = IFN_LAST;
9856 /* The code of the expression corresponding to the built-in. */
9857 enum tree_code opcode = ERROR_MARK;
9858 bool ovf_only = false;
9860 switch (fcode)
9862 case BUILT_IN_ADD_OVERFLOW_P:
9863 ovf_only = true;
9864 /* FALLTHRU */
9865 case BUILT_IN_ADD_OVERFLOW:
9866 case BUILT_IN_SADD_OVERFLOW:
9867 case BUILT_IN_SADDL_OVERFLOW:
9868 case BUILT_IN_SADDLL_OVERFLOW:
9869 case BUILT_IN_UADD_OVERFLOW:
9870 case BUILT_IN_UADDL_OVERFLOW:
9871 case BUILT_IN_UADDLL_OVERFLOW:
9872 opcode = PLUS_EXPR;
9873 ifn = IFN_ADD_OVERFLOW;
9874 break;
9875 case BUILT_IN_SUB_OVERFLOW_P:
9876 ovf_only = true;
9877 /* FALLTHRU */
9878 case BUILT_IN_SUB_OVERFLOW:
9879 case BUILT_IN_SSUB_OVERFLOW:
9880 case BUILT_IN_SSUBL_OVERFLOW:
9881 case BUILT_IN_SSUBLL_OVERFLOW:
9882 case BUILT_IN_USUB_OVERFLOW:
9883 case BUILT_IN_USUBL_OVERFLOW:
9884 case BUILT_IN_USUBLL_OVERFLOW:
9885 opcode = MINUS_EXPR;
9886 ifn = IFN_SUB_OVERFLOW;
9887 break;
9888 case BUILT_IN_MUL_OVERFLOW_P:
9889 ovf_only = true;
9890 /* FALLTHRU */
9891 case BUILT_IN_MUL_OVERFLOW:
9892 case BUILT_IN_SMUL_OVERFLOW:
9893 case BUILT_IN_SMULL_OVERFLOW:
9894 case BUILT_IN_SMULLL_OVERFLOW:
9895 case BUILT_IN_UMUL_OVERFLOW:
9896 case BUILT_IN_UMULL_OVERFLOW:
9897 case BUILT_IN_UMULLL_OVERFLOW:
9898 opcode = MULT_EXPR;
9899 ifn = IFN_MUL_OVERFLOW;
9900 break;
9901 default:
9902 gcc_unreachable ();
9905 /* For the "generic" overloads, the first two arguments can have different
9906 types and the last argument determines the target type to use to check
9907 for overflow. The arguments of the other overloads all have the same
9908 type. */
9909 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9911 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9912 arguments are constant, attempt to fold the built-in call into a constant
9913 expression indicating whether or not it detected an overflow. */
9914 if (ovf_only
9915 && TREE_CODE (arg0) == INTEGER_CST
9916 && TREE_CODE (arg1) == INTEGER_CST)
9917 /* Perform the computation in the target type and check for overflow. */
9918 return omit_one_operand_loc (loc, boolean_type_node,
9919 arith_overflowed_p (opcode, type, arg0, arg1)
9920 ? boolean_true_node : boolean_false_node,
9921 arg2);
9923 tree intres, ovfres;
9924 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9926 intres = fold_binary_loc (loc, opcode, type,
9927 fold_convert_loc (loc, type, arg0),
9928 fold_convert_loc (loc, type, arg1));
9929 if (TREE_OVERFLOW (intres))
9930 intres = drop_tree_overflow (intres);
9931 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9932 ? boolean_true_node : boolean_false_node);
9934 else
9936 tree ctype = build_complex_type (type);
9937 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9938 arg0, arg1);
9939 tree tgt = save_expr (call);
9940 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9941 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9942 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9945 if (ovf_only)
9946 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9948 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9949 tree store
9950 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9951 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
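/* Roughly, the internal-function path above turns

     __builtin_add_overflow (a, b, &r)

   into a call to IFN_ADD_OVERFLOW returning a complex integer whose
   real part is the wrapped sum and whose imaginary part is the
   overflow flag; the sum is stored through the pointer and the flag,
   converted to _Bool, becomes the value of the builtin.  The
   __builtin_*_overflow_p variants keep only the flag.  */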
9954 /* Fold a call to __builtin_FILE to a constant string. */
9956 static inline tree
9957 fold_builtin_FILE (location_t loc)
9959 if (const char *fname = LOCATION_FILE (loc))
9961 /* The documentation says this builtin is equivalent to the preprocessor
9962 __FILE__ macro so it appears appropriate to use the same file prefix
9963 mappings. */
9964 fname = remap_macro_filename (fname);
9965 return build_string_literal (strlen (fname) + 1, fname);
9968 return build_string_literal (1, "");
9971 /* Fold a call to __builtin_FUNCTION to a constant string. */
9973 static inline tree
9974 fold_builtin_FUNCTION ()
9976 const char *name = "";
9978 if (current_function_decl)
9979 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9981 return build_string_literal (strlen (name) + 1, name);
9984 /* Fold a call to __builtin_LINE to an integer constant. */
9986 static inline tree
9987 fold_builtin_LINE (location_t loc, tree type)
9989 return build_int_cst (type, LOCATION_LINE (loc));
9992 /* Fold a call to built-in function FNDECL with 0 arguments.
9993 This function returns NULL_TREE if no simplification was possible. */
9995 static tree
9996 fold_builtin_0 (location_t loc, tree fndecl)
9998 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9999 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10000 switch (fcode)
10002 case BUILT_IN_FILE:
10003 return fold_builtin_FILE (loc);
10005 case BUILT_IN_FUNCTION:
10006 return fold_builtin_FUNCTION ();
10008 case BUILT_IN_LINE:
10009 return fold_builtin_LINE (loc, type);
10011 CASE_FLT_FN (BUILT_IN_INF):
10012 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10013 case BUILT_IN_INFD32:
10014 case BUILT_IN_INFD64:
10015 case BUILT_IN_INFD128:
10016 return fold_builtin_inf (loc, type, true);
10018 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10019 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10020 return fold_builtin_inf (loc, type, false);
10022 case BUILT_IN_CLASSIFY_TYPE:
10023 return fold_builtin_classify_type (NULL_TREE);
10025 default:
10026 break;
10028 return NULL_TREE;
10031 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10032 This function returns NULL_TREE if no simplification was possible. */
10034 static tree
10035 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10037 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10038 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10040 if (TREE_CODE (arg0) == ERROR_MARK)
10041 return NULL_TREE;
10043 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10044 return ret;
10046 switch (fcode)
10048 case BUILT_IN_CONSTANT_P:
10050 tree val = fold_builtin_constant_p (arg0);
10052 /* Gimplification will pull the CALL_EXPR for the builtin out of
10053 an if condition. When not optimizing, we'll not CSE it back.
10054 To avoid regressions such as link errors, return false now. */
10055 if (!val && !optimize)
10056 val = integer_zero_node;
10058 return val;
10061 case BUILT_IN_CLASSIFY_TYPE:
10062 return fold_builtin_classify_type (arg0);
10064 case BUILT_IN_STRLEN:
10065 return fold_builtin_strlen (loc, type, arg0);
10067 CASE_FLT_FN (BUILT_IN_FABS):
10068 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10069 case BUILT_IN_FABSD32:
10070 case BUILT_IN_FABSD64:
10071 case BUILT_IN_FABSD128:
10072 return fold_builtin_fabs (loc, arg0, type);
10074 case BUILT_IN_ABS:
10075 case BUILT_IN_LABS:
10076 case BUILT_IN_LLABS:
10077 case BUILT_IN_IMAXABS:
10078 return fold_builtin_abs (loc, arg0, type);
10080 CASE_FLT_FN (BUILT_IN_CONJ):
10081 if (validate_arg (arg0, COMPLEX_TYPE)
10082 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10083 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10084 break;
10086 CASE_FLT_FN (BUILT_IN_CREAL):
10087 if (validate_arg (arg0, COMPLEX_TYPE)
10088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10089 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10090 break;
10092 CASE_FLT_FN (BUILT_IN_CIMAG):
10093 if (validate_arg (arg0, COMPLEX_TYPE)
10094 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10095 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10096 break;
10098 CASE_FLT_FN (BUILT_IN_CARG):
10099 return fold_builtin_carg (loc, arg0, type);
10101 case BUILT_IN_ISASCII:
10102 return fold_builtin_isascii (loc, arg0);
10104 case BUILT_IN_TOASCII:
10105 return fold_builtin_toascii (loc, arg0);
10107 case BUILT_IN_ISDIGIT:
10108 return fold_builtin_isdigit (loc, arg0);
10110 CASE_FLT_FN (BUILT_IN_FINITE):
10111 case BUILT_IN_FINITED32:
10112 case BUILT_IN_FINITED64:
10113 case BUILT_IN_FINITED128:
10114 case BUILT_IN_ISFINITE:
10116 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10117 if (ret)
10118 return ret;
10119 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10122 CASE_FLT_FN (BUILT_IN_ISINF):
10123 case BUILT_IN_ISINFD32:
10124 case BUILT_IN_ISINFD64:
10125 case BUILT_IN_ISINFD128:
10127 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10128 if (ret)
10129 return ret;
10130 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10133 case BUILT_IN_ISNORMAL:
10134 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10136 case BUILT_IN_ISINF_SIGN:
10137 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10139 CASE_FLT_FN (BUILT_IN_ISNAN):
10140 case BUILT_IN_ISNAND32:
10141 case BUILT_IN_ISNAND64:
10142 case BUILT_IN_ISNAND128:
10143 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10145 case BUILT_IN_FREE:
10146 if (integer_zerop (arg0))
10147 return build_empty_stmt (loc);
10148 break;
10150 default:
10151 break;
10154 return NULL_TREE;
10158 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10159 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10160 if no simplification was possible. */
10162 static tree
10163 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10165 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10166 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10168 if (TREE_CODE (arg0) == ERROR_MARK
10169 || TREE_CODE (arg1) == ERROR_MARK)
10170 return NULL_TREE;
10172 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10173 return ret;
10175 switch (fcode)
10177 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10178 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10179 if (validate_arg (arg0, REAL_TYPE)
10180 && validate_arg (arg1, POINTER_TYPE))
10181 return do_mpfr_lgamma_r (arg0, arg1, type);
10182 break;
10184 CASE_FLT_FN (BUILT_IN_FREXP):
10185 return fold_builtin_frexp (loc, arg0, arg1, type);
10187 CASE_FLT_FN (BUILT_IN_MODF):
10188 return fold_builtin_modf (loc, arg0, arg1, type);
10190 case BUILT_IN_STRSPN:
10191 return fold_builtin_strspn (loc, expr, arg0, arg1);
10193 case BUILT_IN_STRCSPN:
10194 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10196 case BUILT_IN_STRPBRK:
10197 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10199 case BUILT_IN_EXPECT:
10200 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10202 case BUILT_IN_ISGREATER:
10203 return fold_builtin_unordered_cmp (loc, fndecl,
10204 arg0, arg1, UNLE_EXPR, LE_EXPR);
10205 case BUILT_IN_ISGREATEREQUAL:
10206 return fold_builtin_unordered_cmp (loc, fndecl,
10207 arg0, arg1, UNLT_EXPR, LT_EXPR);
10208 case BUILT_IN_ISLESS:
10209 return fold_builtin_unordered_cmp (loc, fndecl,
10210 arg0, arg1, UNGE_EXPR, GE_EXPR);
10211 case BUILT_IN_ISLESSEQUAL:
10212 return fold_builtin_unordered_cmp (loc, fndecl,
10213 arg0, arg1, UNGT_EXPR, GT_EXPR);
10214 case BUILT_IN_ISLESSGREATER:
10215 return fold_builtin_unordered_cmp (loc, fndecl,
10216 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10217 case BUILT_IN_ISUNORDERED:
10218 return fold_builtin_unordered_cmp (loc, fndecl,
10219 arg0, arg1, UNORDERED_EXPR,
10220 NOP_EXPR);
10222 /* We do the folding for va_start in the expander. */
10223 case BUILT_IN_VA_START:
10224 break;
10226 case BUILT_IN_OBJECT_SIZE:
10227 return fold_builtin_object_size (arg0, arg1);
10229 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10230 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10232 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10233 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10235 default:
10236 break;
10238 return NULL_TREE;
10241 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10242 and ARG2.
10243 This function returns NULL_TREE if no simplification was possible. */
10245 static tree
10246 fold_builtin_3 (location_t loc, tree fndecl,
10247 tree arg0, tree arg1, tree arg2)
10249 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10250 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10252 if (TREE_CODE (arg0) == ERROR_MARK
10253 || TREE_CODE (arg1) == ERROR_MARK
10254 || TREE_CODE (arg2) == ERROR_MARK)
10255 return NULL_TREE;
10257 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10258 arg0, arg1, arg2))
10259 return ret;
10261 switch (fcode)
10264 CASE_FLT_FN (BUILT_IN_SINCOS):
10265 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10267 CASE_FLT_FN (BUILT_IN_REMQUO):
10268 if (validate_arg (arg0, REAL_TYPE)
10269 && validate_arg (arg1, REAL_TYPE)
10270 && validate_arg (arg2, POINTER_TYPE))
10271 return do_mpfr_remquo (arg0, arg1, arg2);
10272 break;
10274 case BUILT_IN_MEMCMP:
10275 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10277 case BUILT_IN_EXPECT:
10278 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10280 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10281 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10283 case BUILT_IN_ADD_OVERFLOW:
10284 case BUILT_IN_SUB_OVERFLOW:
10285 case BUILT_IN_MUL_OVERFLOW:
10286 case BUILT_IN_ADD_OVERFLOW_P:
10287 case BUILT_IN_SUB_OVERFLOW_P:
10288 case BUILT_IN_MUL_OVERFLOW_P:
10289 case BUILT_IN_SADD_OVERFLOW:
10290 case BUILT_IN_SADDL_OVERFLOW:
10291 case BUILT_IN_SADDLL_OVERFLOW:
10292 case BUILT_IN_SSUB_OVERFLOW:
10293 case BUILT_IN_SSUBL_OVERFLOW:
10294 case BUILT_IN_SSUBLL_OVERFLOW:
10295 case BUILT_IN_SMUL_OVERFLOW:
10296 case BUILT_IN_SMULL_OVERFLOW:
10297 case BUILT_IN_SMULLL_OVERFLOW:
10298 case BUILT_IN_UADD_OVERFLOW:
10299 case BUILT_IN_UADDL_OVERFLOW:
10300 case BUILT_IN_UADDLL_OVERFLOW:
10301 case BUILT_IN_USUB_OVERFLOW:
10302 case BUILT_IN_USUBL_OVERFLOW:
10303 case BUILT_IN_USUBLL_OVERFLOW:
10304 case BUILT_IN_UMUL_OVERFLOW:
10305 case BUILT_IN_UMULL_OVERFLOW:
10306 case BUILT_IN_UMULLL_OVERFLOW:
10307 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10309 default:
10310 break;
10312 return NULL_TREE;
10315 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10316 ARGS is an array of NARGS arguments. IGNORE is true if the result
10317 of the function call is ignored. This function returns NULL_TREE
10318 if no simplification was possible. */
10320 static tree
10321 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10322 int nargs, bool)
10324 tree ret = NULL_TREE;
10326 switch (nargs)
10328 case 0:
10329 ret = fold_builtin_0 (loc, fndecl);
10330 break;
10331 case 1:
10332 ret = fold_builtin_1 (loc, fndecl, args[0]);
10333 break;
10334 case 2:
10335 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10336 break;
10337 case 3:
10338 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10339 break;
10340 default:
10341 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10342 break;
10344 if (ret)
10346 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10347 SET_EXPR_LOCATION (ret, loc);
10348 return ret;
10350 return NULL_TREE;
10353 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10354 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10355 of arguments in ARGS to be omitted. OLDNARGS is the number of
10356 elements in ARGS. */
10358 static tree
10359 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10360 int skip, tree fndecl, int n, va_list newargs)
10362 int nargs = oldnargs - skip + n;
10363 tree *buffer;
10365 if (n > 0)
10367 int i, j;
10369 buffer = XALLOCAVEC (tree, nargs);
10370 for (i = 0; i < n; i++)
10371 buffer[i] = va_arg (newargs, tree);
10372 for (j = skip; j < oldnargs; j++, i++)
10373 buffer[i] = args[j];
10375 else
10376 buffer = args + skip;
10378 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10381 /* Return true if FNDECL shouldn't be folded right now.
10382 If a built-in function has an inline attribute always_inline
10383 wrapper, defer folding it until after always_inline functions have
10384 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10385 might not be performed. */
10387 bool
10388 avoid_folding_inline_builtin (tree fndecl)
10390 return (DECL_DECLARED_INLINE_P (fndecl)
10391 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10392 && cfun
10393 && !cfun->always_inline_functions_inlined
10394 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10397 /* A wrapper function for builtin folding that prevents warnings for
10398 "statement without effect" and the like, caused by removing the
10399 call node earlier than the warning is generated. */
10401 tree
10402 fold_call_expr (location_t loc, tree exp, bool ignore)
10404 tree ret = NULL_TREE;
10405 tree fndecl = get_callee_fndecl (exp);
10406 if (fndecl && fndecl_built_in_p (fndecl)
10407 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10408 yet. Defer folding until we see all the arguments
10409 (after inlining). */
10410 && !CALL_EXPR_VA_ARG_PACK (exp))
10412 int nargs = call_expr_nargs (exp);
10414 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10415 instead the last argument is __builtin_va_arg_pack (). Defer folding
10416 even in that case, until arguments are finalized. */
10417 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10419 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10420 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10421 return NULL_TREE;
10424 if (avoid_folding_inline_builtin (fndecl))
10425 return NULL_TREE;
10427 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10428 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10429 CALL_EXPR_ARGP (exp), ignore);
10430 else
10432 tree *args = CALL_EXPR_ARGP (exp);
10433 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10434 if (ret)
10435 return ret;
10438 return NULL_TREE;
10441 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10442 N arguments are passed in the array ARGARRAY. Return a folded
10443 expression or NULL_TREE if no simplification was possible. */
10445 tree
10446 fold_builtin_call_array (location_t loc, tree,
10447 tree fn,
10448 int n,
10449 tree *argarray)
10451 if (TREE_CODE (fn) != ADDR_EXPR)
10452 return NULL_TREE;
10454 tree fndecl = TREE_OPERAND (fn, 0);
10455 if (TREE_CODE (fndecl) == FUNCTION_DECL
10456 && fndecl_built_in_p (fndecl))
10458 /* If last argument is __builtin_va_arg_pack (), arguments to this
10459 function are not finalized yet. Defer folding until they are. */
10460 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10462 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10463 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10464 return NULL_TREE;
10466 if (avoid_folding_inline_builtin (fndecl))
10467 return NULL_TREE;
10468 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10469 return targetm.fold_builtin (fndecl, n, argarray, false);
10470 else
10471 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10474 return NULL_TREE;
10477 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10478 along with N new arguments specified as the "..." parameters. SKIP
10479 is the number of arguments in EXP to be omitted. This function is used
10480 to do varargs-to-varargs transformations. */
10482 static tree
10483 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10485 va_list ap;
10486 tree t;
10488 va_start (ap, n);
10489 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10490 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10491 va_end (ap);
10493 return t;
10496 /* Validate a single argument ARG against a tree code CODE representing
10497 a type. Return true when argument is valid. */
10499 static bool
10500 validate_arg (const_tree arg, enum tree_code code)
10502 if (!arg)
10503 return false;
10504 else if (code == POINTER_TYPE)
10505 return POINTER_TYPE_P (TREE_TYPE (arg));
10506 else if (code == INTEGER_TYPE)
10507 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10508 return code == TREE_CODE (TREE_TYPE (arg));
10511 /* This function validates the types of a function call argument list
10512 against a specified list of tree_codes. If the last specifier is a 0,
10513 that represents an ellipsis, otherwise the last specifier must be a
10514 VOID_TYPE.
10516 This is the GIMPLE version of validate_arglist. Eventually we want to
10517 completely convert builtins.c to work from GIMPLEs and the tree based
10518 validate_arglist will then be removed. */
10520 bool
10521 validate_gimple_arglist (const gcall *call, ...)
10523 enum tree_code code;
10524 bool res = 0;
10525 va_list ap;
10526 const_tree arg;
10527 size_t i;
10529 va_start (ap, call);
10530 i = 0;
10534 code = (enum tree_code) va_arg (ap, int);
10535 switch (code)
10537 case 0:
10538 /* This signifies an ellipsis; any further arguments are all OK. */
10539 res = true;
10540 goto end;
10541 case VOID_TYPE:
10542 /* This signifies an endlink; if no arguments remain, return
10543 true, otherwise return false. */
10544 res = (i == gimple_call_num_args (call));
10545 goto end;
10546 default:
10547 /* If no parameters remain or the parameter's code does not
10548 match the specified code, return false. Otherwise continue
10549 checking any remaining arguments. */
10550 arg = gimple_call_arg (call, i++);
10551 if (!validate_arg (arg, code))
10552 goto end;
10553 break;
10556 while (1);
10558 /* We need gotos here since we can only have one VA_CLOSE in a
10559 function. */
10560 end: ;
10561 va_end (ap);
10563 return res;
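/* A hypothetical use, checking a call such as sincos (x, &s, &c):

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   POINTER_TYPE, VOID_TYPE))
       return false;

   Here VOID_TYPE terminates the list, whereas a trailing 0 would
   accept any further arguments.  */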
10566 /* Default target-specific builtin expander that does nothing. */
10568 rtx
10569 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10570 rtx target ATTRIBUTE_UNUSED,
10571 rtx subtarget ATTRIBUTE_UNUSED,
10572 machine_mode mode ATTRIBUTE_UNUSED,
10573 int ignore ATTRIBUTE_UNUSED)
10575 return NULL_RTX;
10578 /* Returns true if EXP represents data that would potentially reside
10579 in a readonly section. */
10581 bool
10582 readonly_data_expr (tree exp)
10584 STRIP_NOPS (exp);
10586 if (TREE_CODE (exp) != ADDR_EXPR)
10587 return false;
10589 exp = get_base_address (TREE_OPERAND (exp, 0));
10590 if (!exp)
10591 return false;
10593 /* Make sure we call decl_readonly_section only for trees it
10594 can handle (since it returns true for everything it doesn't
10595 understand). */
10596 if (TREE_CODE (exp) == STRING_CST
10597 || TREE_CODE (exp) == CONSTRUCTOR
10598 || (VAR_P (exp) && TREE_STATIC (exp)))
10599 return decl_readonly_section (exp, 0);
10600 else
10601 return false;
10604 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10605 to the call, and TYPE is its return type.
10607 Return NULL_TREE if no simplification was possible, otherwise return the
10608 simplified form of the call as a tree.
10610 The simplified form may be a constant or other expression which
10611 computes the same value, but in a more efficient manner (including
10612 calls to other builtin functions).
10614 The call may contain arguments which need to be evaluated, but
10615 which are not useful to determine the result of the call. In
10616 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10617 COMPOUND_EXPR will be an argument which must be evaluated.
10618 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10619 COMPOUND_EXPR in the chain will contain the tree for the simplified
10620 form of the builtin function call. */
10622 static tree
10623 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10625 if (!validate_arg (s1, POINTER_TYPE)
10626 || !validate_arg (s2, POINTER_TYPE))
10627 return NULL_TREE;
10629 if (!check_nul_terminated_array (expr, s1)
10630 || !check_nul_terminated_array (expr, s2))
10631 return NULL_TREE;
10633 tree fn;
10634 const char *p1, *p2;
10636 p2 = c_getstr (s2);
10637 if (p2 == NULL)
10638 return NULL_TREE;
10640 p1 = c_getstr (s1);
10641 if (p1 != NULL)
10643 const char *r = strpbrk (p1, p2);
10644 tree tem;
10646 if (r == NULL)
10647 return build_int_cst (TREE_TYPE (s1), 0);
10649 /* Return an offset into the constant string argument. */
10650 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10651 return fold_convert_loc (loc, type, tem);
10654 if (p2[0] == '\0')
10655 /* strpbrk(x, "") == NULL.
10656 Evaluate and ignore s1 in case it had side-effects. */
10657 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10659 if (p2[1] != '\0')
10660 return NULL_TREE; /* Really call strpbrk. */
10662 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10663 if (!fn)
10664 return NULL_TREE;
10666 /* New argument list transforming strpbrk(s1, s2) to
10667 strchr(s1, s2[0]). */
10668 return build_call_expr_loc (loc, fn, 2, s1,
10669 build_int_cst (integer_type_node, p2[0]));
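/* For example, with a constant second argument the rewrite above gives

     strpbrk (s, "")    =>  (char *) 0      with S still evaluated
     strpbrk (s, "a")   =>  strchr (s, 'a')

   and a call with both strings constant is computed at compile time as
   an offset into S1.  */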
10672 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10673 to the call.
10675 Return NULL_TREE if no simplification was possible, otherwise return the
10676 simplified form of the call as a tree.
10678 The simplified form may be a constant or other expression which
10679 computes the same value, but in a more efficient manner (including
10680 calls to other builtin functions).
10682 The call may contain arguments which need to be evaluated, but
10683 which are not useful to determine the result of the call. In
10684 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10685 COMPOUND_EXPR will be an argument which must be evaluated.
10686 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10687 COMPOUND_EXPR in the chain will contain the tree for the simplified
10688 form of the builtin function call. */
10690 static tree
10691 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10693 if (!validate_arg (s1, POINTER_TYPE)
10694 || !validate_arg (s2, POINTER_TYPE))
10695 return NULL_TREE;
10697 if (!check_nul_terminated_array (expr, s1)
10698 || !check_nul_terminated_array (expr, s2))
10699 return NULL_TREE;
10701 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10703 /* If either argument is "", the result is zero. */
10704 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10705 /* Evaluate and ignore both arguments in case either one has
10706 side-effects. */
10707 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10708 s1, s2);
10709 return NULL_TREE;
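/* Illustrative examples of the folding above (assumed sample values,
   for exposition only):

     strspn ("", s2)  -> 0   [s2 still evaluated for side-effects]
     strspn (s1, "")  -> 0   [s1 still evaluated for side-effects]

   Any other form, e.g. strspn ("aab", "a") with two constant
   arguments, is not folded here.  */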
10712 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10713 to the call.
10715 Return NULL_TREE if no simplification was possible, otherwise return the
10716 simplified form of the call as a tree.
10718 The simplified form may be a constant or other expression which
10719 computes the same value, but in a more efficient manner (including
10720 calls to other builtin functions).
10722 The call may contain arguments which need to be evaluated, but
10723 which are not useful to determine the result of the call. In
10724 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10725 COMPOUND_EXPR will be an argument which must be evaluated.
10726 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10727 COMPOUND_EXPR in the chain will contain the tree for the simplified
10728 form of the builtin function call. */
10730 static tree
10731 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10733 if (!validate_arg (s1, POINTER_TYPE)
10734 || !validate_arg (s2, POINTER_TYPE))
10735 return NULL_TREE;
10737 if (!check_nul_terminated_array (expr, s1)
10738 || !check_nul_terminated_array (expr, s2))
10739 return NULL_TREE;
10741 /* If the first argument is "", the result is zero. */
10742 const char *p1 = c_getstr (s1);
10743 if (p1 && *p1 == '\0')
10745 /* Evaluate and ignore argument s2 in case it has
10746 side-effects. */
10747 return omit_one_operand_loc (loc, size_type_node,
10748 size_zero_node, s2);
10751 /* If the second argument is "", return __builtin_strlen(s1). */
10752 const char *p2 = c_getstr (s2);
10753 if (p2 && *p2 == '\0')
10755 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10757 /* If the replacement _DECL isn't initialized, don't do the
10758 transformation. */
10759 if (!fn)
10760 return NULL_TREE;
10762 return build_call_expr_loc (loc, fn, 1, s1);
10764 return NULL_TREE;
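/* Illustrative examples of the folding above (assumed sample values,
   for exposition only):

     strcspn ("", s2)  -> 0   [s2 still evaluated for side-effects]
     strcspn (s1, "")  -> __builtin_strlen (s1)

   Everything else falls through to the library call.  */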
10767 /* Fold the next_arg or va_start call EXP. Returns true if an error
10768 was produced, false otherwise. This is done so that we don't output
10769 the error or warning two or three times. */
10771 bool
10772 fold_builtin_next_arg (tree exp, bool va_start_p)
10774 tree fntype = TREE_TYPE (current_function_decl);
10775 int nargs = call_expr_nargs (exp);
10776 tree arg;
10777 /* There is a good chance the current input_location points inside the
10778 definition of the va_start macro (perhaps on the token for
10779 builtin) in a system header, so warnings will not be emitted.
10780 Use the location in real source code. */
10781 location_t current_location =
10782 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10783 NULL);
10785 if (!stdarg_p (fntype))
10787 error ("%<va_start%> used in function with fixed arguments");
10788 return true;
10791 if (va_start_p)
10793 if (nargs != 2)
10795 error ("wrong number of arguments to function %<va_start%>");
10796 return true;
10798 arg = CALL_EXPR_ARG (exp, 1);
10800 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
10801 once we have checked the arguments and, if needed, issued a warning. */
10802 else
10804 if (nargs == 0)
10806 /* Evidently an out of date version of <stdarg.h>; can't validate
10807 va_start's second argument, but can still work as intended. */
10808 warning_at (current_location,
10809 OPT_Wvarargs,
10810 "%<__builtin_next_arg%> called without an argument");
10811 return true;
10813 else if (nargs > 1)
10815 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10816 return true;
10818 arg = CALL_EXPR_ARG (exp, 0);
10821 if (TREE_CODE (arg) == SSA_NAME)
10822 arg = SSA_NAME_VAR (arg);
10824 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10825 or __builtin_next_arg (0) the first time we see it, after checking
10826 the arguments and if needed issuing a warning. */
10827 if (!integer_zerop (arg))
10829 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10831 /* Strip off all nops for the sake of the comparison. This
10832 is not quite the same as STRIP_NOPS. It does more.
10833 We must also strip off INDIRECT_EXPR for C++ reference
10834 parameters. */
10835 while (CONVERT_EXPR_P (arg)
10836 || TREE_CODE (arg) == INDIRECT_REF)
10837 arg = TREE_OPERAND (arg, 0);
10838 if (arg != last_parm)
10840 /* FIXME: Sometimes with the tree optimizers we can get
10841 something other than the last argument even though the user
10842 used the last argument. We just warn here, so the generated
10843 code may still be wrong because of it. */
10845 warning_at (current_location,
10846 OPT_Wvarargs,
10847 "second parameter of %<va_start%> not last named argument");
10850 /* Undefined by C99 7.15.1.4p4 (va_start):
10851 "If the parameter parmN is declared with the register storage
10852 class, with a function or array type, or with a type that is
10853 not compatible with the type that results after application of
10854 the default argument promotions, the behavior is undefined."
10856 else if (DECL_REGISTER (arg))
10858 warning_at (current_location,
10859 OPT_Wvarargs,
10860 "undefined behavior when second parameter of "
10861 "%<va_start%> is declared with %<register%> storage");
10864 /* We want to verify the second parameter just once before the tree
10865 optimizers are run and then avoid keeping it in the tree,
10866 as otherwise we could warn even for correct code like:
10867 void foo (int i, ...)
10868 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10869 if (va_start_p)
10870 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10871 else
10872 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10874 return false;
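/* For exposition, a sketch (not from the original sources) of what
   this validation accepts and rejects, assuming the definition
   void foo (int a, int b, ...):

     va_start (ap, b);  // OK; afterwards rewritten to
                        //     __builtin_va_start (ap, 0)
     va_start (ap, a);  // -Wvarargs: not the last named argument
     va_start (ap);     // error: wrong number of arguments

   Zeroing the checked argument keeps the tree optimizers from
   re-examining (and possibly re-diagnosing) a parameter that has
   already been verified.  */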
10878 /* Expand a call EXP to __builtin_object_size. */
10880 static rtx
10881 expand_builtin_object_size (tree exp)
10883 tree ost;
10884 int object_size_type;
10885 tree fndecl = get_callee_fndecl (exp);
10887 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10889 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10890 exp, fndecl);
10891 expand_builtin_trap ();
10892 return const0_rtx;
10895 ost = CALL_EXPR_ARG (exp, 1);
10896 STRIP_NOPS (ost);
10898 if (TREE_CODE (ost) != INTEGER_CST
10899 || tree_int_cst_sgn (ost) < 0
10900 || compare_tree_int (ost, 3) > 0)
10902 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10903 exp, fndecl);
10904 expand_builtin_trap ();
10905 return const0_rtx;
10908 object_size_type = tree_to_shwi (ost);
10910 return object_size_type < 2 ? constm1_rtx : const0_rtx;
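/* For exposition (assumed sample calls): when the object size could
   not be determined and the call survives to expansion, the fallback
   above produces the documented "unknown" results:

     __builtin_object_size (p, 0)  -> (size_t) -1   [types 0 and 1]
     __builtin_object_size (p, 2)  -> (size_t) 0    [types 2 and 3]  */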
10913 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10914 FCODE is the BUILT_IN_* to use.
10915 Return NULL_RTX if we failed; the caller should emit a normal call,
10916 otherwise try to get the result in TARGET, if convenient (and in
10917 mode MODE if that's convenient). */
10919 static rtx
10920 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10921 enum built_in_function fcode)
10923 if (!validate_arglist (exp,
10924 POINTER_TYPE,
10925 fcode == BUILT_IN_MEMSET_CHK
10926 ? INTEGER_TYPE : POINTER_TYPE,
10927 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10928 return NULL_RTX;
10930 tree dest = CALL_EXPR_ARG (exp, 0);
10931 tree src = CALL_EXPR_ARG (exp, 1);
10932 tree len = CALL_EXPR_ARG (exp, 2);
10933 tree size = CALL_EXPR_ARG (exp, 3);
10935 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10936 /*str=*/NULL_TREE, size);
10938 if (!tree_fits_uhwi_p (size))
10939 return NULL_RTX;
10941 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10943 /* Avoid transforming the checking call to an ordinary one when
10944 an overflow has been detected or when the call couldn't be
10945 validated because the size is not constant. */
10946 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10947 return NULL_RTX;
10949 tree fn = NULL_TREE;
10950 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10951 mem{cpy,pcpy,move,set} is available. */
10952 switch (fcode)
10954 case BUILT_IN_MEMCPY_CHK:
10955 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10956 break;
10957 case BUILT_IN_MEMPCPY_CHK:
10958 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10959 break;
10960 case BUILT_IN_MEMMOVE_CHK:
10961 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10962 break;
10963 case BUILT_IN_MEMSET_CHK:
10964 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10965 break;
10966 default:
10967 break;
10970 if (! fn)
10971 return NULL_RTX;
10973 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10974 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10975 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10976 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10978 else if (fcode == BUILT_IN_MEMSET_CHK)
10979 return NULL_RTX;
10980 else
10982 unsigned int dest_align = get_pointer_alignment (dest);
10984 /* If DEST is not a pointer type, call the normal function. */
10985 if (dest_align == 0)
10986 return NULL_RTX;
10988 /* If SRC and DEST are the same (and not volatile), do nothing. */
10989 if (operand_equal_p (src, dest, 0))
10991 tree expr;
10993 if (fcode != BUILT_IN_MEMPCPY_CHK)
10995 /* Evaluate and ignore LEN in case it has side-effects. */
10996 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10997 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11000 expr = fold_build_pointer_plus (dest, len);
11001 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11004 /* __memmove_chk special case. */
11005 if (fcode == BUILT_IN_MEMMOVE_CHK)
11007 unsigned int src_align = get_pointer_alignment (src);
11009 if (src_align == 0)
11010 return NULL_RTX;
11012 /* If SRC is categorized for a readonly section, we can use the
11013 normal __memcpy_chk. */
11014 if (readonly_data_expr (src))
11016 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11017 if (!fn)
11018 return NULL_RTX;
11019 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11020 dest, src, len, size);
11021 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11022 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11023 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11026 return NULL_RTX;
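/* An illustrative transformation performed above (the sample sizes
   are assumptions for exposition): with a known length that fits the
   destination,

     __builtin___memcpy_chk (d, s, 16, 32)  -> memcpy (d, s, 16)

   whereas a detected overflow, say an object size of 8 against a
   length of 16, keeps the checking call so the overflow can still be
   caught at run time.  */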
11030 /* Emit warning if a buffer overflow is detected at compile time. */
11032 static void
11033 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11035 /* The source string. */
11036 tree srcstr = NULL_TREE;
11037 /* The size of the destination object. */
11038 tree objsize = NULL_TREE;
11039 /* The string that is being concatenated with (as in __strcat_chk)
11040 or null if it isn't. */
11041 tree catstr = NULL_TREE;
11042 /* The maximum length of the source sequence in a bounded operation
11043 (such as __strncat_chk) or null if the operation isn't bounded
11044 (such as __strcat_chk). */
11045 tree maxread = NULL_TREE;
11046 /* The exact size of the access (such as in __strncpy_chk). */
11047 tree size = NULL_TREE;
11049 switch (fcode)
11051 case BUILT_IN_STRCPY_CHK:
11052 case BUILT_IN_STPCPY_CHK:
11053 srcstr = CALL_EXPR_ARG (exp, 1);
11054 objsize = CALL_EXPR_ARG (exp, 2);
11055 break;
11057 case BUILT_IN_STRCAT_CHK:
11058 /* For __strcat_chk the warning will be emitted only if overflowing
11059 by at least strlen (dest) + 1 bytes. */
11060 catstr = CALL_EXPR_ARG (exp, 0);
11061 srcstr = CALL_EXPR_ARG (exp, 1);
11062 objsize = CALL_EXPR_ARG (exp, 2);
11063 break;
11065 case BUILT_IN_STRNCAT_CHK:
11066 catstr = CALL_EXPR_ARG (exp, 0);
11067 srcstr = CALL_EXPR_ARG (exp, 1);
11068 maxread = CALL_EXPR_ARG (exp, 2);
11069 objsize = CALL_EXPR_ARG (exp, 3);
11070 break;
11072 case BUILT_IN_STRNCPY_CHK:
11073 case BUILT_IN_STPNCPY_CHK:
11074 srcstr = CALL_EXPR_ARG (exp, 1);
11075 size = CALL_EXPR_ARG (exp, 2);
11076 objsize = CALL_EXPR_ARG (exp, 3);
11077 break;
11079 case BUILT_IN_SNPRINTF_CHK:
11080 case BUILT_IN_VSNPRINTF_CHK:
11081 maxread = CALL_EXPR_ARG (exp, 1);
11082 objsize = CALL_EXPR_ARG (exp, 3);
11083 break;
11084 default:
11085 gcc_unreachable ();
11088 if (catstr && maxread)
11090 /* Check __strncat_chk. There is no way to determine the length
11091 of the string to which the source string is being appended so
11092 just warn when the length of the source string is not known. */
11093 check_strncat_sizes (exp, objsize);
11094 return;
11097 /* The destination argument is the first one for all built-ins above. */
11098 tree dst = CALL_EXPR_ARG (exp, 0);
11100 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
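/* For exposition, an assumed call that this dispatch diagnoses:

     char d[4];
     __builtin___strcpy_chk (d, "hello", 4);

   copies strlen ("hello") + 1 = 6 bytes into an object known to hold
   only 4, so check_access() emits a buffer-overflow warning.  */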
11103 /* Emit warning if a buffer overflow is detected at compile time
11104 in __sprintf_chk/__vsprintf_chk calls. */
11106 static void
11107 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11109 tree size, len, fmt;
11110 const char *fmt_str;
11111 int nargs = call_expr_nargs (exp);
11113 /* Verify the required arguments in the original call. */
11115 if (nargs < 4)
11116 return;
11117 size = CALL_EXPR_ARG (exp, 2);
11118 fmt = CALL_EXPR_ARG (exp, 3);
11120 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11121 return;
11123 /* Check whether the format is a literal string constant. */
11124 fmt_str = c_getstr (fmt);
11125 if (fmt_str == NULL)
11126 return;
11128 if (!init_target_chars ())
11129 return;
11131 /* If the format doesn't contain % args or %%, we know its size. */
11132 if (strchr (fmt_str, target_percent) == 0)
11133 len = build_int_cstu (size_type_node, strlen (fmt_str));
11134 /* If the format is "%s" and the first ... argument is a string literal,
11135 we know it too. */
11136 else if (fcode == BUILT_IN_SPRINTF_CHK
11137 && strcmp (fmt_str, target_percent_s) == 0)
11139 tree arg;
11141 if (nargs < 5)
11142 return;
11143 arg = CALL_EXPR_ARG (exp, 4);
11144 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11145 return;
11147 len = c_strlen (arg, 1);
11148 if (!len || ! tree_fits_uhwi_p (len))
11149 return;
11151 else
11152 return;
11154 /* Add one for the terminating nul. */
11155 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11157 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11158 /*maxread=*/NULL_TREE, len, size);
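/* For exposition, an assumed call that the logic above flags:

     char buf[4];
     __builtin___sprintf_chk (buf, 1, 4, "abcdef");

   The format contains no '%', so the output needs exactly
   strlen ("abcdef") + 1 = 7 bytes, exceeding the object size 4.  */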
11161 /* Emit a warning if free is called with the address of a variable. */
11163 static void
11164 maybe_emit_free_warning (tree exp)
11166 if (call_expr_nargs (exp) != 1)
11167 return;
11169 tree arg = CALL_EXPR_ARG (exp, 0);
11171 STRIP_NOPS (arg);
11172 if (TREE_CODE (arg) != ADDR_EXPR)
11173 return;
11175 arg = get_base_address (TREE_OPERAND (arg, 0));
11176 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11177 return;
11179 if (SSA_VAR_P (arg))
11180 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11181 "%Kattempt to free a non-heap object %qD", exp, arg);
11182 else
11183 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11184 "%Kattempt to free a non-heap object", exp);
11187 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11188 if possible. */
11190 static tree
11191 fold_builtin_object_size (tree ptr, tree ost)
11193 unsigned HOST_WIDE_INT bytes;
11194 int object_size_type;
11196 if (!validate_arg (ptr, POINTER_TYPE)
11197 || !validate_arg (ost, INTEGER_TYPE))
11198 return NULL_TREE;
11200 STRIP_NOPS (ost);
11202 if (TREE_CODE (ost) != INTEGER_CST
11203 || tree_int_cst_sgn (ost) < 0
11204 || compare_tree_int (ost, 3) > 0)
11205 return NULL_TREE;
11207 object_size_type = tree_to_shwi (ost);
11209 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11210 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11211 and (size_t) 0 for types 2 and 3. */
11212 if (TREE_SIDE_EFFECTS (ptr))
11213 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11215 if (TREE_CODE (ptr) == ADDR_EXPR)
11217 compute_builtin_object_size (ptr, object_size_type, &bytes);
11218 if (wi::fits_to_tree_p (bytes, size_type_node))
11219 return build_int_cstu (size_type_node, bytes);
11221 else if (TREE_CODE (ptr) == SSA_NAME)
11223 /* If object size is not known yet, delay folding until
11224 later. Maybe subsequent passes will help determining
11225 it. */
11226 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11227 && wi::fits_to_tree_p (bytes, size_type_node))
11228 return build_int_cstu (size_type_node, bytes);
11231 return NULL_TREE;
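/* An illustrative folding of the ADDR_EXPR case above (assumed sample
   values, for exposition only):

     char buf[10];
     __builtin_object_size (&buf[4], 0)  -> (size_t) 6

   i.e. the number of bytes from the pointer to the end of the
   enclosing object.  */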
11234 /* Builtins with folding operations that operate on "..." arguments
11235 need special handling; we need to store the arguments in a convenient
11236 data structure before attempting any folding. Fortunately there are
11237 only a few builtins that fall into this category. FNDECL is the
11238 function, EXP is the CALL_EXPR for the call. */
11240 static tree
11241 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11243 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11244 tree ret = NULL_TREE;
11246 switch (fcode)
11248 case BUILT_IN_FPCLASSIFY:
11249 ret = fold_builtin_fpclassify (loc, args, nargs);
11250 break;
11252 default:
11253 break;
11255 if (ret)
11257 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11258 SET_EXPR_LOCATION (ret, loc);
11259 TREE_NO_WARNING (ret) = 1;
11260 return ret;
11262 return NULL_TREE;
11265 /* Initialize format string characters in the target charset. */
11267 bool
11268 init_target_chars (void)
11270 static bool init;
11271 if (!init)
11273 target_newline = lang_hooks.to_target_charset ('\n');
11274 target_percent = lang_hooks.to_target_charset ('%');
11275 target_c = lang_hooks.to_target_charset ('c');
11276 target_s = lang_hooks.to_target_charset ('s');
11277 if (target_newline == 0 || target_percent == 0 || target_c == 0
11278 || target_s == 0)
11279 return false;
11281 target_percent_c[0] = target_percent;
11282 target_percent_c[1] = target_c;
11283 target_percent_c[2] = '\0';
11285 target_percent_s[0] = target_percent;
11286 target_percent_s[1] = target_s;
11287 target_percent_s[2] = '\0';
11289 target_percent_s_newline[0] = target_percent;
11290 target_percent_s_newline[1] = target_s;
11291 target_percent_s_newline[2] = target_newline;
11292 target_percent_s_newline[3] = '\0';
11294 init = true;
11296 return true;
11299 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11300 and no overflow/underflow occurred. INEXACT is true if M was not
11301 exactly calculated. TYPE is the tree type for the result. This
11302 function assumes that the caller cleared the MPFR flags before
11303 calculating M, so that any flag set since then indicates an
11304 exception. Return NULL_TREE if any checks fail. */
11306 static tree
11307 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11309 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11310 overflow/underflow occurred. If -frounding-math, proceed iff the
11311 result of calling FUNC was exact. */
11312 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11313 && (!flag_rounding_math || !inexact))
11315 REAL_VALUE_TYPE rr;
11317 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11318 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11319 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11320 but the mpfr_t is not, then we underflowed in the
11321 conversion. */
11322 if (real_isfinite (&rr)
11323 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11325 REAL_VALUE_TYPE rmode;
11327 real_convert (&rmode, TYPE_MODE (type), &rr);
11328 /* Proceed iff the specified mode can hold the value. */
11329 if (real_identical (&rmode, &rr))
11330 return build_real (type, rmode);
11333 return NULL_TREE;
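/* A minimal sketch of the calling convention do_mpfr_ckconv() relies
   on (assumed caller code, for exposition only):

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, MPFR_RNDN);
     mpfr_clear_flags ();                  -- reset before computing
     int inexact = mpfr_sin (m, m, rnd);   -- may raise flags
     tree res = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);

   mirroring the pattern used by do_mpfr_remquo() below.  */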
11336 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11337 number and no overflow/underflow occurred. INEXACT is true if M
11338 was not exactly calculated. TYPE is the tree type for the result.
11339 This function assumes that the caller cleared the MPFR flags before
11340 calculating M, so that any flag set since then indicates an
11341 exception. Return NULL_TREE if any checks fail; if FORCE_CONVERT
11342 is true, bypass the checks. */
11344 static tree
11345 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11347 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11348 overflow/underflow occurred. If -frounding-math, proceed iff the
11349 result of calling FUNC was exact. */
11350 if (force_convert
11351 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11352 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11353 && (!flag_rounding_math || !inexact)))
11355 REAL_VALUE_TYPE re, im;
11357 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11358 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11359 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11360 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11361 but the mpfr_t is not, then we underflowed in the
11362 conversion. */
11363 if (force_convert
11364 || (real_isfinite (&re) && real_isfinite (&im)
11365 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11366 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11368 REAL_VALUE_TYPE re_mode, im_mode;
11370 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11371 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11372 /* Proceed iff the specified mode can hold the value. */
11373 if (force_convert
11374 || (real_identical (&re_mode, &re)
11375 && real_identical (&im_mode, &im)))
11376 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11377 build_real (TREE_TYPE (type), im_mode));
11380 return NULL_TREE;
11383 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11384 the value pointed to by ARG_QUO and return the result. The type is taken
11385 from the type of ARG0 and is used for setting the precision of the
11386 calculation and results. */
11388 static tree
11389 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11391 tree const type = TREE_TYPE (arg0);
11392 tree result = NULL_TREE;
11394 STRIP_NOPS (arg0);
11395 STRIP_NOPS (arg1);
11397 /* To proceed, MPFR must exactly represent the target floating point
11398 format, which only happens when the target base equals two. */
11399 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11400 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11401 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11403 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11404 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11406 if (real_isfinite (ra0) && real_isfinite (ra1))
11408 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11409 const int prec = fmt->p;
11410 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11411 tree result_rem;
11412 long integer_quo;
11413 mpfr_t m0, m1;
11415 mpfr_inits2 (prec, m0, m1, NULL);
11416 mpfr_from_real (m0, ra0, MPFR_RNDN);
11417 mpfr_from_real (m1, ra1, MPFR_RNDN);
11418 mpfr_clear_flags ();
11419 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11420 /* Remquo is independent of the rounding mode, so pass
11421 inexact=0 to do_mpfr_ckconv(). */
11422 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11423 mpfr_clears (m0, m1, NULL);
11424 if (result_rem)
11426 /* MPFR calculates quo in the host's long so it may
11427 return more bits in quo than the target int can hold
11428 if sizeof(host long) > sizeof(target int). This can
11429 happen even for native compilers in LP64 mode. In
11430 these cases, modulo the quo value with the largest
11431 number that the target int can hold while leaving one
11432 bit for the sign. */
11433 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11434 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11436 /* Dereference the quo pointer argument. */
11437 arg_quo = build_fold_indirect_ref (arg_quo);
11438 /* Proceed iff a valid pointer type was passed in. */
11439 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11441 /* Set the value. */
11442 tree result_quo
11443 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11444 build_int_cst (TREE_TYPE (arg_quo),
11445 integer_quo));
11446 TREE_SIDE_EFFECTS (result_quo) = 1;
11447 /* Combine the quo assignment with the rem. */
11448 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11449 result_quo, result_rem));
11454 return result;
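/* An assumed concrete instance of the folding above (for exposition
   only): remquo (5.0, 3.0, &q) has a round-to-nearest quotient of 2,
   so the call folds to the compound expression

     *q = 2, -1.0

   storing the quotient through the pointer and yielding the
   remainder 5.0 - 2*3.0 = -1.0 as the value.  */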
11457 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11458 resulting value as a tree with type TYPE. The mpfr precision is
11459 set to the precision of TYPE. We assume that this mpfr function
11460 returns zero if the result could be calculated exactly within the
11461 requested precision. In addition, the integer pointer represented
11462 by ARG_SG will be dereferenced and set to the appropriate signgam
11463 (-1,1) value. */
11465 static tree
11466 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11468 tree result = NULL_TREE;
11470 STRIP_NOPS (arg);
11472 /* To proceed, MPFR must exactly represent the target floating point
11473 format, which only happens when the target base equals two. Also
11474 verify ARG is a constant and that ARG_SG is an int pointer. */
11475 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11476 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11477 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11478 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11480 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11482 /* In addition to NaN and Inf, the argument cannot be zero or a
11483 negative integer. */
11484 if (real_isfinite (ra)
11485 && ra->cl != rvc_zero
11486 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11488 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11489 const int prec = fmt->p;
11490 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
11491 int inexact, sg;
11492 mpfr_t m;
11493 tree result_lg;
11495 mpfr_init2 (m, prec);
11496 mpfr_from_real (m, ra, MPFR_RNDN);
11497 mpfr_clear_flags ();
11498 inexact = mpfr_lgamma (m, &sg, m, rnd);
11499 result_lg = do_mpfr_ckconv (m, type, inexact);
11500 mpfr_clear (m);
11501 if (result_lg)
11503 tree result_sg;
11505 /* Dereference the arg_sg pointer argument. */
11506 arg_sg = build_fold_indirect_ref (arg_sg);
11507 /* Assign the signgam value into *arg_sg. */
11508 result_sg = fold_build2 (MODIFY_EXPR,
11509 TREE_TYPE (arg_sg), arg_sg,
11510 build_int_cst (TREE_TYPE (arg_sg), sg));
11511 TREE_SIDE_EFFECTS (result_sg) = 1;
11512 /* Combine the signgam assignment with the lgamma result. */
11513 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11514 result_sg, result_lg));
11519 return result;
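/* An assumed example of the folding above (for exposition only):
   lgamma_r (0.5, &sg) becomes, approximately,

     *sg = 1, 0.57236494...

   since gamma(0.5) = sqrt(pi) is positive, so the sign is 1 and the
   result is log (sqrt (pi)).  */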
11522 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11523 mpc function FUNC on it and return the resulting value as a tree
11524 with type TYPE. The mpfr precision is set to the precision of
11525 TYPE. We assume that function FUNC returns zero if the result
11526 could be calculated exactly within the requested precision. If
11527 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11528 in the arguments and/or results. */
11530 tree
11531 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11532 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11534 tree result = NULL_TREE;
11536 STRIP_NOPS (arg0);
11537 STRIP_NOPS (arg1);
11539 /* To proceed, MPFR must exactly represent the target floating point
11540 format, which only happens when the target base equals two. */
11541 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11542 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11543 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11544 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11545 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11547 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11548 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11549 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11550 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11552 if (do_nonfinite
11553 || (real_isfinite (re0) && real_isfinite (im0)
11554 && real_isfinite (re1) && real_isfinite (im1)))
11556 const struct real_format *const fmt =
11557 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11558 const int prec = fmt->p;
11559 const mpfr_rnd_t rnd = fmt->round_towards_zero
11560 ? MPFR_RNDZ : MPFR_RNDN;
11561 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11562 int inexact;
11563 mpc_t m0, m1;
11565 mpc_init2 (m0, prec);
11566 mpc_init2 (m1, prec);
11567 mpfr_from_real (mpc_realref (m0), re0, rnd);
11568 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11569 mpfr_from_real (mpc_realref (m1), re1, rnd);
11570 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11571 mpfr_clear_flags ();
11572 inexact = func (m0, m0, m1, crnd);
11573 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11574 mpc_clear (m0);
11575 mpc_clear (m1);
11579 return result;
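/* A minimal sketch of a plausible caller (assumed code, for
   exposition only), folding cpow (z0, z1) on two COMPLEX_CST
   arguments with nonfinite folding disabled:

     tree res = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);  */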
11582 /* A wrapper function for builtin folding that prevents warnings for
11583 "statement without effect" and the like, caused by removing the
11584 call node earlier than the warning is generated. */
11586 tree
11587 fold_call_stmt (gcall *stmt, bool ignore)
11589 tree ret = NULL_TREE;
11590 tree fndecl = gimple_call_fndecl (stmt);
11591 location_t loc = gimple_location (stmt);
11592 if (fndecl && fndecl_built_in_p (fndecl)
11593 && !gimple_call_va_arg_pack_p (stmt))
11595 int nargs = gimple_call_num_args (stmt);
11596 tree *args = (nargs > 0
11597 ? gimple_call_arg_ptr (stmt, 0)
11598 : &error_mark_node);
11600 if (avoid_folding_inline_builtin (fndecl))
11601 return NULL_TREE;
11602 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11604 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11606 else
11608 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11609 if (ret)
11611 /* Propagate location information from original call to
11612 expansion of builtin. Otherwise things like
11613 maybe_emit_chk_warning, that operate on the expansion
11614 of a builtin, will use the wrong location information. */
11615 if (gimple_has_location (stmt))
11617 tree realret = ret;
11618 if (TREE_CODE (ret) == NOP_EXPR)
11619 realret = TREE_OPERAND (ret, 0);
11620 if (CAN_HAVE_LOCATION_P (realret)
11621 && !EXPR_HAS_LOCATION (realret))
11622 SET_EXPR_LOCATION (realret, loc);
11623 return realret;
11625 return ret;
11629 return NULL_TREE;
11632 /* Look up the function in builtin_decl that corresponds to DECL
11633 and set ASMSPEC as its user assembler name. DECL must be a
11634 function decl that declares a builtin. */
11636 void
11637 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11639 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11640 && asmspec != 0);
11642 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11643 set_user_assembler_name (builtin, asmspec);
11645 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11646 && INT_TYPE_SIZE < BITS_PER_WORD)
11648 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11649 set_user_assembler_libfunc ("ffs", asmspec);
11650 set_optab_libfunc (ffs_optab, mode, "ffs");
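/* For exposition (assumed usage): a declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   ends up here with ASMSPEC "my_ffs". On targets where int is
   narrower than a word, the ffs libfunc and optab entries are
   redirected too, so that expanded __builtin_ffs calls also use the
   user's name.  */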
11654 /* Return true if DECL is a builtin that expands to a constant or similarly
11655 simple code. */
11656 bool
11657 is_simple_builtin (tree decl)
11659 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11660 switch (DECL_FUNCTION_CODE (decl))
11662 /* Builtins that expand to constants. */
11663 case BUILT_IN_CONSTANT_P:
11664 case BUILT_IN_EXPECT:
11665 case BUILT_IN_OBJECT_SIZE:
11666 case BUILT_IN_UNREACHABLE:
11667 /* Simple register moves or loads from stack. */
11668 case BUILT_IN_ASSUME_ALIGNED:
11669 case BUILT_IN_RETURN_ADDRESS:
11670 case BUILT_IN_EXTRACT_RETURN_ADDR:
11671 case BUILT_IN_FROB_RETURN_ADDR:
11672 case BUILT_IN_RETURN:
11673 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11674 case BUILT_IN_FRAME_ADDRESS:
11675 case BUILT_IN_VA_END:
11676 case BUILT_IN_STACK_SAVE:
11677 case BUILT_IN_STACK_RESTORE:
11678 /* Exception state returns or moves registers around. */
11679 case BUILT_IN_EH_FILTER:
11680 case BUILT_IN_EH_POINTER:
11681 case BUILT_IN_EH_COPY_VALUES:
11682 return true;
11684 default:
11685 return false;
11688 return false;
11691 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11692 is most probably expanded inline into reasonably simple code. This is a
11693 superset of is_simple_builtin. */
11694 bool
11695 is_inexpensive_builtin (tree decl)
11697 if (!decl)
11698 return false;
11699 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11700 return true;
11701 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11702 switch (DECL_FUNCTION_CODE (decl))
11704 case BUILT_IN_ABS:
11705 CASE_BUILT_IN_ALLOCA:
11706 case BUILT_IN_BSWAP16:
11707 case BUILT_IN_BSWAP32:
11708 case BUILT_IN_BSWAP64:
11709 case BUILT_IN_CLZ:
11710 case BUILT_IN_CLZIMAX:
11711 case BUILT_IN_CLZL:
11712 case BUILT_IN_CLZLL:
11713 case BUILT_IN_CTZ:
11714 case BUILT_IN_CTZIMAX:
11715 case BUILT_IN_CTZL:
11716 case BUILT_IN_CTZLL:
11717 case BUILT_IN_FFS:
11718 case BUILT_IN_FFSIMAX:
11719 case BUILT_IN_FFSL:
11720 case BUILT_IN_FFSLL:
11721 case BUILT_IN_IMAXABS:
11722 case BUILT_IN_FINITE:
11723 case BUILT_IN_FINITEF:
11724 case BUILT_IN_FINITEL:
11725 case BUILT_IN_FINITED32:
11726 case BUILT_IN_FINITED64:
11727 case BUILT_IN_FINITED128:
11728 case BUILT_IN_FPCLASSIFY:
11729 case BUILT_IN_ISFINITE:
11730 case BUILT_IN_ISINF_SIGN:
11731 case BUILT_IN_ISINF:
11732 case BUILT_IN_ISINFF:
11733 case BUILT_IN_ISINFL:
11734 case BUILT_IN_ISINFD32:
11735 case BUILT_IN_ISINFD64:
11736 case BUILT_IN_ISINFD128:
11737 case BUILT_IN_ISNAN:
11738 case BUILT_IN_ISNANF:
11739 case BUILT_IN_ISNANL:
11740 case BUILT_IN_ISNAND32:
11741 case BUILT_IN_ISNAND64:
11742 case BUILT_IN_ISNAND128:
11743 case BUILT_IN_ISNORMAL:
11744 case BUILT_IN_ISGREATER:
11745 case BUILT_IN_ISGREATEREQUAL:
11746 case BUILT_IN_ISLESS:
11747 case BUILT_IN_ISLESSEQUAL:
11748 case BUILT_IN_ISLESSGREATER:
11749 case BUILT_IN_ISUNORDERED:
11750 case BUILT_IN_VA_ARG_PACK:
11751 case BUILT_IN_VA_ARG_PACK_LEN:
11752 case BUILT_IN_VA_COPY:
11753 case BUILT_IN_TRAP:
11754 case BUILT_IN_SAVEREGS:
11755 case BUILT_IN_POPCOUNTL:
11756 case BUILT_IN_POPCOUNTLL:
11757 case BUILT_IN_POPCOUNTIMAX:
11758 case BUILT_IN_POPCOUNT:
11759 case BUILT_IN_PARITYL:
11760 case BUILT_IN_PARITYLL:
11761 case BUILT_IN_PARITYIMAX:
11762 case BUILT_IN_PARITY:
11763 case BUILT_IN_LABS:
11764 case BUILT_IN_LLABS:
11765 case BUILT_IN_PREFETCH:
11766 case BUILT_IN_ACC_ON_DEVICE:
11767 return true;
11769 default:
11770 return is_simple_builtin (decl);
11773 return false;
11776 /* Return true if T is a constant and the value cast to a target char
11777 can be represented by a host char.
11778 Store the cast char constant in *P if so. */
11780 bool
11781 target_char_cst_p (tree t, char *p)
11783 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11784 return false;
11786 *p = (char)tree_to_uhwi (t);
11787 return true;
11790 /* Return true if the builtin DECL is implemented in a standard library.
11791 Otherwise return false, which does not guarantee it is not (the list
11792 of handled builtins below may be incomplete). */
11794 bool
11795 builtin_with_linkage_p (tree decl)
11797 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11798 switch (DECL_FUNCTION_CODE (decl))
11800 CASE_FLT_FN (BUILT_IN_ACOS):
11801 CASE_FLT_FN (BUILT_IN_ACOSH):
11802 CASE_FLT_FN (BUILT_IN_ASIN):
11803 CASE_FLT_FN (BUILT_IN_ASINH):
11804 CASE_FLT_FN (BUILT_IN_ATAN):
11805 CASE_FLT_FN (BUILT_IN_ATANH):
11806 CASE_FLT_FN (BUILT_IN_ATAN2):
11807 CASE_FLT_FN (BUILT_IN_CBRT):
11808 CASE_FLT_FN (BUILT_IN_CEIL):
11809 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11810 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11811 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11812 CASE_FLT_FN (BUILT_IN_COS):
11813 CASE_FLT_FN (BUILT_IN_COSH):
11814 CASE_FLT_FN (BUILT_IN_ERF):
11815 CASE_FLT_FN (BUILT_IN_ERFC):
11816 CASE_FLT_FN (BUILT_IN_EXP):
11817 CASE_FLT_FN (BUILT_IN_EXP2):
11818 CASE_FLT_FN (BUILT_IN_EXPM1):
11819 CASE_FLT_FN (BUILT_IN_FABS):
11820 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11821 CASE_FLT_FN (BUILT_IN_FDIM):
11822 CASE_FLT_FN (BUILT_IN_FLOOR):
11823 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11824 CASE_FLT_FN (BUILT_IN_FMA):
11825 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11826 CASE_FLT_FN (BUILT_IN_FMAX):
11827 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11828 CASE_FLT_FN (BUILT_IN_FMIN):
11829 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11830 CASE_FLT_FN (BUILT_IN_FMOD):
11831 CASE_FLT_FN (BUILT_IN_FREXP):
11832 CASE_FLT_FN (BUILT_IN_HYPOT):
11833 CASE_FLT_FN (BUILT_IN_ILOGB):
11834 CASE_FLT_FN (BUILT_IN_LDEXP):
11835 CASE_FLT_FN (BUILT_IN_LGAMMA):
11836 CASE_FLT_FN (BUILT_IN_LLRINT):
11837 CASE_FLT_FN (BUILT_IN_LLROUND):
11838 CASE_FLT_FN (BUILT_IN_LOG):
11839 CASE_FLT_FN (BUILT_IN_LOG10):
11840 CASE_FLT_FN (BUILT_IN_LOG1P):
11841 CASE_FLT_FN (BUILT_IN_LOG2):
11842 CASE_FLT_FN (BUILT_IN_LOGB):
11843 CASE_FLT_FN (BUILT_IN_LRINT):
11844 CASE_FLT_FN (BUILT_IN_LROUND):
11845 CASE_FLT_FN (BUILT_IN_MODF):
11846 CASE_FLT_FN (BUILT_IN_NAN):
11847 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11848 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11849 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11850 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11851 CASE_FLT_FN (BUILT_IN_POW):
11852 CASE_FLT_FN (BUILT_IN_REMAINDER):
11853 CASE_FLT_FN (BUILT_IN_REMQUO):
11854 CASE_FLT_FN (BUILT_IN_RINT):
11855 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11856 CASE_FLT_FN (BUILT_IN_ROUND):
11857 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11858 CASE_FLT_FN (BUILT_IN_SCALBLN):
11859 CASE_FLT_FN (BUILT_IN_SCALBN):
11860 CASE_FLT_FN (BUILT_IN_SIN):
11861 CASE_FLT_FN (BUILT_IN_SINH):
11862 CASE_FLT_FN (BUILT_IN_SINCOS):
11863 CASE_FLT_FN (BUILT_IN_SQRT):
11864 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11865 CASE_FLT_FN (BUILT_IN_TAN):
11866 CASE_FLT_FN (BUILT_IN_TANH):
11867 CASE_FLT_FN (BUILT_IN_TGAMMA):
11868 CASE_FLT_FN (BUILT_IN_TRUNC):
11869 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11870 return true;
11871 default:
11872 break;
11874 return false;