/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static bool get_range (tree, gimple *, signop, offset_int[2],
		       range_query * = NULL);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, true))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
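
/* Editor's sketch, not part of the original file: how the predicate
   above classifies a few example identifiers.  Guarded by #if 0 so it
   is never compiled.  */
#if 0
static void
is_builtin_name_examples (void)
{
  gcc_assert (is_builtin_name ("__builtin_memcpy"));   /* builtin prefix  */
  gcc_assert (is_builtin_name ("__sync_synchronize")); /* sync prefix  */
  gcc_assert (is_builtin_name ("__atomic_load_n"));    /* atomic prefix  */
  gcc_assert (!is_builtin_name ("memcpy"));            /* plain name  */
}
#endif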

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
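
/* Editor's sketch, not part of the original file: the invariant
   documented above is ptr & (align - 1) == bitpos.  For instance, if
   get_object_alignment_1 computes a 64-bit alignment with a 16-bit
   offset, the address is known to lie 2 bytes past an 8-byte boundary,
   so the only alignment that can be guaranteed for the address itself
   is least_bit_hwi (16), i.e. 16 bits.  */
#if 0
static void
get_object_alignment_example (tree exp)
{
  unsigned int align;
  unsigned HOST_WIDE_INT bitpos;
  get_object_alignment_1 (exp, &align, &bitpos);
  /* Suppose align == 64 and bitpos == 16 here; then
     get_object_alignment (exp) returns 16.  */
}
#endif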

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
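
/* Editor's sketch, not part of the original file: string_length counts
   leading non-zero elements only and never looks past MAXELTS.  */
#if 0
static void
string_length_examples (void)
{
  static const char buf[] = "ab\0cd";
  gcc_assert (string_length (buf, 1, 5) == 2); /* stops at the embedded NUL  */
  gcc_assert (string_length (buf, 1, 1) == 1); /* capped by MAXELTS  */

  static const char wbuf[] = { 'a', 0, 'b', 0, 0, 0 };
  gcc_assert (string_length (wbuf, 2, 3) == 2); /* two-byte elements  */
}
#endif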

/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  DECL is the declaration
   of the referenced array, used to point the note at its definition.
   Either EXPR or FNAME may be null but not both.  SIZE may be null
   when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC as an argument, returns false if SRC is a character array with
   no terminating NUL.  When nonnull, BOUND is the number of characters
   in which to expect the terminating NUL.  When EXPR is nonnull also
   issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is true, but not more.  */
  tree size;
  /* True if SRC involves a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
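
/* Editor's sketch, not part of the original file: for a declaration
   like

     const char a[4] = "abcd";

   the initializer exactly fills the array, leaving no room for the
   terminating NUL.  Under the interface above, a reference to A is
   expected to behave as sketched below; the helper name and argument
   are hypothetical.  */
#if 0
static void
unterminated_array_example (tree ref_to_a /* a reference to A above */)
{
  tree size;
  bool exact;
  tree decl = unterminated_array (ref_to_a, &size, &exact);
  /* Expected: DECL is A's declaration, SIZE == ssize_int (4), and
     EXACT is true; for an ordinary NUL-terminated string the result
     is NULL_TREE instead.  */
}
#endif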

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}

      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
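
/* Editor's sketch, not part of the original file: the two common
   outcomes of the interface documented above.  The argument names are
   hypothetical.  */
#if 0
static void
c_strlen_examples (tree arg_hello, tree arg_unterminated)
{
  /* For ARG_HELLO referring to the constant "hello", the result is
     ssize_int (5).  */
  tree len = c_strlen (arg_hello, 1);

  /* For a reference to an unterminated constant array the result is
     NULL_TREE, and the DATA fields (decl, minlen, off) are filled in
     for the caller to examine.  */
  c_strlen_data data = { };
  tree none = c_strlen (arg_unterminated, 1, &data);
}
#endif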

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p /*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
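
/* Editor's sketch, not part of the original file: per the byte
   placement above, reading "abc" in a 32-bit integer mode puts 'a'
   (0x61) in the low byte on a little-endian target, giving 0x00636261
   (the fourth byte is the implicit NUL); on a big-endian target the
   result is 0x61626300.  */
#if 0
static void
c_readstr_example (void)
{
  rtx x = c_readstr ("abc", SImode);
  /* Little endian: (const_int 0x00636261); big endian: 0x61626300.  */
}
#endif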

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
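
/* Editor's sketch, not part of the original file: on the usual
   host/target combination where both have 8-bit chars, casting the
   constant 'A' succeeds and a zero return signals that the value
   fit.  */
#if 0
static void
target_char_cast_example (void)
{
  char c;
  if (target_char_cast (build_int_cst (char_type_node, 'A'), &c) == 0)
    gcc_assert (c == 'A');
}
#endif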

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
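
/* Editor's sketch, not part of the original file: typical uses of
   validate_arglist elsewhere in this file.  A trailing VOID_TYPE means
   "exactly these arguments"; a trailing 0 permits further varargs.  */
#if 0
static void
validate_arglist_examples (tree exp)
{
  /* memcpy-like: (void *, const void *, size_t) and nothing more.  */
  bool ok_memcpy
    = validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
			VOID_TYPE);
  /* prefetch-like: a pointer followed by optional extra arguments.  */
  bool ok_prefetch = validate_arglist (exp, POINTER_TYPE, 0);
}
#endif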

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
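
/* Editor's note, not part of the original file: as the setjmp helpers
   above show, the __builtin_setjmp buffer of five words is laid out as

     buf[0]             saved frame pointer
     buf[1]             address of the receiver label
     buf[2] .. buf[4]   machine-dependent stack save area

   expand_builtin_update_setjmp_buf rewrites only the stack save area,
   i.e. the slot at offset 2 * GET_MODE_SIZE (Pmode).  */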

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
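
/* Editor's sketch, not part of the original file: the source-level
   builtin this function expands.  The second argument selects read (0)
   or write (1); the third selects temporal locality from 0 (none) to 3
   (high, the default).  */
#if 0
void
prefetch_example (const double *p)
{
  __builtin_prefetch (p, 0, 3);      /* read; keep in all cache levels  */
  __builtin_prefetch (p + 64, 1, 0); /* write; no expected reuse  */
}
#endif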

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
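
/* Editor's note, not part of the original file: the block sized above
   is laid out, in order, as

     [0]             the incoming arg pointer (one Pmode word)
     [optional]      the structure value address, when it is not passed
                     as an invisible first argument
     [per register]  each FUNCTION_ARG_REGNO_P register, padded to the
                     natural alignment of its raw argument mode

   apply_args_mode[] records the mode each register is saved in so that
   expand_builtin_apply_args_1 below can replay the same layout.  */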

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1736 /* Create a vector describing the result block RESULT. If SAVEP is true,
1737 the result block is used to save the values; otherwise it is used to
1738 restore the values. */
1740 static rtx
1741 result_vector (int savep, rtx result)
1743 int regno, size, align, nelts;
1744 fixed_size_mode mode;
1745 rtx reg, mem;
1746 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1748 size = nelts = 0;
1749 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1750 if ((mode = apply_result_mode[regno]) != VOIDmode)
1752 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1753 if (size % align != 0)
1754 size = CEIL (size, align) * align;
1755 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1756 mem = adjust_address (result, mode, size);
1757 savevec[nelts++] = (savep
1758 ? gen_rtx_SET (mem, reg)
1759 : gen_rtx_SET (reg, mem));
1760 size += GET_MODE_SIZE (mode);
1762 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
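/* For illustration only: on a hypothetical target whose sole value-return
   register is (reg:DI 0), result_vector (1, result) would yield

     (parallel [(set (mem:DI <result+0>) (reg:DI 0))])

   that is, one SET per possible return register, storing into the RESULT
   block when SAVEP and loading from it otherwise.  */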
1765 /* Save the state required to perform an untyped call with the same
1766 arguments as were passed to the current function. */
1768 static rtx
1769 expand_builtin_apply_args_1 (void)
1771 rtx registers, tem;
1772 int size, align, regno;
1773 fixed_size_mode mode;
1774 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1776 /* Create a block where the arg-pointer, structure value address,
1777 and argument registers can be saved. */
1778 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1780 /* Walk past the arg-pointer and structure value address. */
1781 size = GET_MODE_SIZE (Pmode);
1782 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1783 size += GET_MODE_SIZE (Pmode);
1785 /* Save each register used in calling a function to the block. */
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_args_mode[regno]) != VOIDmode)
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1793 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1795 emit_move_insn (adjust_address (registers, mode, size), tem);
1796 size += GET_MODE_SIZE (mode);
1799 /* Save the arg pointer to the block. */
1800 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1801 /* We need the pointer as the caller actually passed it to us, not
1802 as we might have pretended it was passed. Make sure it's a valid
1803 operand, as emit_move_insn isn't expected to handle a PLUS. */
1804 if (STACK_GROWS_DOWNWARD)
1806 tem = force_operand (plus_constant (Pmode, tem,
1807 crtl->args.pretend_args_size),
1808 NULL_RTX);
1809 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1811 size = GET_MODE_SIZE (Pmode);
1813 /* Save the structure value address unless this is passed as an
1814 "invisible" first argument. */
1815 if (struct_incoming_value)
1816 emit_move_insn (adjust_address (registers, Pmode, size),
1817 copy_to_reg (struct_incoming_value));
1819 /* Return the address of the block. */
1820 return copy_addr_to_reg (XEXP (registers, 0));
1823 /* __builtin_apply_args returns a block of memory allocated on
1824 the stack into which is stored the arg pointer, structure
1825 value address, static chain, and all the registers that might
1826 possibly be used in performing a function call. The code is
1827 moved to the start of the function so the incoming values are
1828 saved. */
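/* A minimal usage sketch of these builtins (illustrative only; the wrapper
   function and the 16-byte argument-block size are assumptions):

     double target_fn (double);

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 16);
       __builtin_return (ret);
     }

   expand_builtin_apply_args saves the incoming registers,
   expand_builtin_apply replays them for the forwarded call, and
   expand_builtin_return hands the callee's return value back.  */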
1830 static rtx
1831 expand_builtin_apply_args (void)
1833 /* Don't do __builtin_apply_args more than once in a function.
1834 Save the result of the first call and reuse it. */
1835 if (apply_args_value != 0)
1836 return apply_args_value;
1838 /* When this function is called, it means that registers must be
1839 saved on entry to this function. So we migrate the
1840 call to the first insn of this function. */
1841 rtx temp;
1843 start_sequence ();
1844 temp = expand_builtin_apply_args_1 ();
1845 rtx_insn *seq = get_insns ();
1846 end_sequence ();
1848 apply_args_value = temp;
1850 /* Put the insns after the NOTE that starts the function.
1851 If this is inside a start_sequence, make the outer-level insn
1852 chain current, so the code is placed at the start of the
1853 function. If internal_arg_pointer is a non-virtual pseudo,
1854 it needs to be placed after the function that initializes
1855 that pseudo. */
1856 push_topmost_sequence ();
1857 if (REG_P (crtl->args.internal_arg_pointer)
1858 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1859 emit_insn_before (seq, parm_birth_insn);
1860 else
1861 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1862 pop_topmost_sequence ();
1863 return temp;
1867 /* Perform an untyped call and save the state required to perform an
1868 untyped return of whatever value was returned by the given function. */
1870 static rtx
1871 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1873 int size, align, regno;
1874 fixed_size_mode mode;
1875 rtx incoming_args, result, reg, dest, src;
1876 rtx_call_insn *call_insn;
1877 rtx old_stack_level = 0;
1878 rtx call_fusage = 0;
1879 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1881 arguments = convert_memory_address (Pmode, arguments);
1883 /* Create a block where the return registers can be saved. */
1884 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1886 /* Fetch the arg pointer from the ARGUMENTS block. */
1887 incoming_args = gen_reg_rtx (Pmode);
1888 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1889 if (!STACK_GROWS_DOWNWARD)
1890 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1891 incoming_args, 0, OPTAB_LIB_WIDEN);
1893 /* Push a new argument block and copy the arguments. Do not allow
1894 the (potential) memcpy call below to interfere with our stack
1895 manipulations. */
1896 do_pending_stack_adjust ();
1897 NO_DEFER_POP;
1899 /* Save the stack with nonlocal if available. */
1900 if (targetm.have_save_stack_nonlocal ())
1901 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1902 else
1903 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1905 /* Allocate a block of memory onto the stack and copy the memory
1906 arguments to the outgoing arguments address. We can pass TRUE
1907 as the 4th argument because we just saved the stack pointer
1908 and will restore it right after the call. */
1909 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1911 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1912 may have already set current_function_calls_alloca to true.
1913 current_function_calls_alloca won't be set if argsize is zero,
1914 so we have to guarantee need_drap is true here. */
1915 if (SUPPORTS_STACK_ALIGNMENT)
1916 crtl->need_drap = true;
1918 dest = virtual_outgoing_args_rtx;
1919 if (!STACK_GROWS_DOWNWARD)
1921 if (CONST_INT_P (argsize))
1922 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1923 else
1924 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1926 dest = gen_rtx_MEM (BLKmode, dest);
1927 set_mem_align (dest, PARM_BOUNDARY);
1928 src = gen_rtx_MEM (BLKmode, incoming_args);
1929 set_mem_align (src, PARM_BOUNDARY);
1930 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1932 /* Refer to the argument block. */
1933 apply_args_size ();
1934 arguments = gen_rtx_MEM (BLKmode, arguments);
1935 set_mem_align (arguments, PARM_BOUNDARY);
1937 /* Walk past the arg-pointer and structure value address. */
1938 size = GET_MODE_SIZE (Pmode);
1939 if (struct_value)
1940 size += GET_MODE_SIZE (Pmode);
1942 /* Restore each of the registers previously saved. Make USE insns
1943 for each of these registers for use in making the call. */
1944 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1945 if ((mode = apply_args_mode[regno]) != VOIDmode)
1947 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1948 if (size % align != 0)
1949 size = CEIL (size, align) * align;
1950 reg = gen_rtx_REG (mode, regno);
1951 emit_move_insn (reg, adjust_address (arguments, mode, size));
1952 use_reg (&call_fusage, reg);
1953 size += GET_MODE_SIZE (mode);
1956 /* Restore the structure value address unless this is passed as an
1957 "invisible" first argument. */
1958 size = GET_MODE_SIZE (Pmode);
1959 if (struct_value)
1961 rtx value = gen_reg_rtx (Pmode);
1962 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1963 emit_move_insn (struct_value, value);
1964 if (REG_P (struct_value))
1965 use_reg (&call_fusage, struct_value);
1968 /* All arguments and registers used for the call are set up by now! */
1969 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1971 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1972 and we don't want to load it into a register as an optimization,
1973 because prepare_call_address already did it if it should be done. */
1974 if (GET_CODE (function) != SYMBOL_REF)
1975 function = memory_address (FUNCTION_MODE, function);
1977 /* Generate the actual call instruction and save the return value. */
1978 if (targetm.have_untyped_call ())
1980 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1981 emit_call_insn (targetm.gen_untyped_call (mem, result,
1982 result_vector (1, result)));
1984 else if (targetm.have_call_value ())
1986 rtx valreg = 0;
1988 /* Locate the unique return register. It is not possible to
1989 express a call that sets more than one return register using
1990 call_value; use untyped_call for that. In fact, untyped_call
1991 only needs to save the return registers in the given block. */
1992 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1993 if ((mode = apply_result_mode[regno]) != VOIDmode)
1995 gcc_assert (!valreg); /* have_untyped_call required. */
1997 valreg = gen_rtx_REG (mode, regno);
2000 emit_insn (targetm.gen_call_value (valreg,
2001 gen_rtx_MEM (FUNCTION_MODE, function),
2002 const0_rtx, NULL_RTX, const0_rtx));
2004 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2006 else
2007 gcc_unreachable ();
2009 /* Find the CALL insn we just emitted, and attach the register usage
2010 information. */
2011 call_insn = last_call_insn ();
2012 add_function_usage_to (call_insn, call_fusage);
2014 /* Restore the stack. */
2015 if (targetm.have_save_stack_nonlocal ())
2016 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2017 else
2018 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2019 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2021 OK_DEFER_POP;
2023 /* Return the address of the result block. */
2024 result = copy_addr_to_reg (XEXP (result, 0));
2025 return convert_memory_address (ptr_mode, result);
2028 /* Perform an untyped return. */
2030 static void
2031 expand_builtin_return (rtx result)
2033 int size, align, regno;
2034 fixed_size_mode mode;
2035 rtx reg;
2036 rtx_insn *call_fusage = 0;
2038 result = convert_memory_address (Pmode, result);
2040 apply_result_size ();
2041 result = gen_rtx_MEM (BLKmode, result);
2043 if (targetm.have_untyped_return ())
2045 rtx vector = result_vector (0, result);
2046 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2047 emit_barrier ();
2048 return;
2051 /* Restore the return value and note that each value is used. */
2052 size = 0;
2053 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2054 if ((mode = apply_result_mode[regno]) != VOIDmode)
2056 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2057 if (size % align != 0)
2058 size = CEIL (size, align) * align;
2059 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2060 emit_move_insn (reg, adjust_address (result, mode, size));
2062 push_to_sequence (call_fusage);
2063 emit_use (reg);
2064 call_fusage = get_insns ();
2065 end_sequence ();
2066 size += GET_MODE_SIZE (mode);
2069 /* Put the USE insns before the return. */
2070 emit_insn (call_fusage);
2072 /* Return whatever values were restored by jumping directly to the end
2073 of the function. */
2074 expand_naked_return ();
2077 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2079 static enum type_class
2080 type_to_class (tree type)
2082 switch (TREE_CODE (type))
2084 case VOID_TYPE: return void_type_class;
2085 case INTEGER_TYPE: return integer_type_class;
2086 case ENUMERAL_TYPE: return enumeral_type_class;
2087 case BOOLEAN_TYPE: return boolean_type_class;
2088 case POINTER_TYPE: return pointer_type_class;
2089 case REFERENCE_TYPE: return reference_type_class;
2090 case OFFSET_TYPE: return offset_type_class;
2091 case REAL_TYPE: return real_type_class;
2092 case COMPLEX_TYPE: return complex_type_class;
2093 case FUNCTION_TYPE: return function_type_class;
2094 case METHOD_TYPE: return method_type_class;
2095 case RECORD_TYPE: return record_type_class;
2096 case UNION_TYPE:
2097 case QUAL_UNION_TYPE: return union_type_class;
2098 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2099 ? string_type_class : array_type_class);
2100 case LANG_TYPE: return lang_type_class;
2101 default: return no_type_class;
2105 /* Expand a call EXP to __builtin_classify_type. */
2107 static rtx
2108 expand_builtin_classify_type (tree exp)
2110 if (call_expr_nargs (exp))
2111 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2112 return GEN_INT (no_type_class);
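/* For example (illustrative): __builtin_classify_type (1) folds to
   integer_type_class, __builtin_classify_type (1.0) to real_type_class,
   and __builtin_classify_type ("a") to pointer_type_class, the string
   argument having decayed to a pointer before classification.  */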
2115 /* This helper macro, meant to be used in mathfn_built_in below, determines
2116 which among a set of builtin math functions is appropriate for a given type
2117 mode. The `F' (float) and `L' (long double) are automatically generated
2118 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2119 types, there are additional types that are considered with 'F32', 'F64',
2120 'F128', etc. suffixes. */
2121 #define CASE_MATHFN(MATHFN) \
2122 CASE_CFN_##MATHFN: \
2123 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2124 fcodel = BUILT_IN_##MATHFN##L ; break;
2125 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2126 types. */
2127 #define CASE_MATHFN_FLOATN(MATHFN) \
2128 CASE_CFN_##MATHFN: \
2129 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2130 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2131 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2132 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2133 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2134 break;
2135 /* Similar to above, but appends _R after any F/L suffix. */
2136 #define CASE_MATHFN_REENT(MATHFN) \
2137 case CFN_BUILT_IN_##MATHFN##_R: \
2138 case CFN_BUILT_IN_##MATHFN##F_R: \
2139 case CFN_BUILT_IN_##MATHFN##L_R: \
2140 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2141 fcodel = BUILT_IN_##MATHFN##L_R ; break;
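/* For instance, CASE_MATHFN (SIN) above expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single invocation covers the double, float and long double
   variants of the function.  */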
2143 /* Return a function equivalent to FN but operating on floating-point
2144 values of type TYPE, or END_BUILTINS if no such function exists.
2145 This is purely an operation on function codes; it does not guarantee
2146 that the target actually has an implementation of the function. */
2148 static built_in_function
2149 mathfn_built_in_2 (tree type, combined_fn fn)
2151 tree mtype;
2152 built_in_function fcode, fcodef, fcodel;
2153 built_in_function fcodef16 = END_BUILTINS;
2154 built_in_function fcodef32 = END_BUILTINS;
2155 built_in_function fcodef64 = END_BUILTINS;
2156 built_in_function fcodef128 = END_BUILTINS;
2157 built_in_function fcodef32x = END_BUILTINS;
2158 built_in_function fcodef64x = END_BUILTINS;
2159 built_in_function fcodef128x = END_BUILTINS;
2161 switch (fn)
2163 #define SEQ_OF_CASE_MATHFN \
2164 CASE_MATHFN (ACOS) \
2165 CASE_MATHFN (ACOSH) \
2166 CASE_MATHFN (ASIN) \
2167 CASE_MATHFN (ASINH) \
2168 CASE_MATHFN (ATAN) \
2169 CASE_MATHFN (ATAN2) \
2170 CASE_MATHFN (ATANH) \
2171 CASE_MATHFN (CBRT) \
2172 CASE_MATHFN_FLOATN (CEIL) \
2173 CASE_MATHFN (CEXPI) \
2174 CASE_MATHFN_FLOATN (COPYSIGN) \
2175 CASE_MATHFN (COS) \
2176 CASE_MATHFN (COSH) \
2177 CASE_MATHFN (DREM) \
2178 CASE_MATHFN (ERF) \
2179 CASE_MATHFN (ERFC) \
2180 CASE_MATHFN (EXP) \
2181 CASE_MATHFN (EXP10) \
2182 CASE_MATHFN (EXP2) \
2183 CASE_MATHFN (EXPM1) \
2184 CASE_MATHFN (FABS) \
2185 CASE_MATHFN (FDIM) \
2186 CASE_MATHFN_FLOATN (FLOOR) \
2187 CASE_MATHFN_FLOATN (FMA) \
2188 CASE_MATHFN_FLOATN (FMAX) \
2189 CASE_MATHFN_FLOATN (FMIN) \
2190 CASE_MATHFN (FMOD) \
2191 CASE_MATHFN (FREXP) \
2192 CASE_MATHFN (GAMMA) \
2193 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2194 CASE_MATHFN (HUGE_VAL) \
2195 CASE_MATHFN (HYPOT) \
2196 CASE_MATHFN (ILOGB) \
2197 CASE_MATHFN (ICEIL) \
2198 CASE_MATHFN (IFLOOR) \
2199 CASE_MATHFN (INF) \
2200 CASE_MATHFN (IRINT) \
2201 CASE_MATHFN (IROUND) \
2202 CASE_MATHFN (ISINF) \
2203 CASE_MATHFN (J0) \
2204 CASE_MATHFN (J1) \
2205 CASE_MATHFN (JN) \
2206 CASE_MATHFN (LCEIL) \
2207 CASE_MATHFN (LDEXP) \
2208 CASE_MATHFN (LFLOOR) \
2209 CASE_MATHFN (LGAMMA) \
2210 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2211 CASE_MATHFN (LLCEIL) \
2212 CASE_MATHFN (LLFLOOR) \
2213 CASE_MATHFN (LLRINT) \
2214 CASE_MATHFN (LLROUND) \
2215 CASE_MATHFN (LOG) \
2216 CASE_MATHFN (LOG10) \
2217 CASE_MATHFN (LOG1P) \
2218 CASE_MATHFN (LOG2) \
2219 CASE_MATHFN (LOGB) \
2220 CASE_MATHFN (LRINT) \
2221 CASE_MATHFN (LROUND) \
2222 CASE_MATHFN (MODF) \
2223 CASE_MATHFN (NAN) \
2224 CASE_MATHFN (NANS) \
2225 CASE_MATHFN_FLOATN (NEARBYINT) \
2226 CASE_MATHFN (NEXTAFTER) \
2227 CASE_MATHFN (NEXTTOWARD) \
2228 CASE_MATHFN (POW) \
2229 CASE_MATHFN (POWI) \
2230 CASE_MATHFN (POW10) \
2231 CASE_MATHFN (REMAINDER) \
2232 CASE_MATHFN (REMQUO) \
2233 CASE_MATHFN_FLOATN (RINT) \
2234 CASE_MATHFN_FLOATN (ROUND) \
2235 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2236 CASE_MATHFN (SCALB) \
2237 CASE_MATHFN (SCALBLN) \
2238 CASE_MATHFN (SCALBN) \
2239 CASE_MATHFN (SIGNBIT) \
2240 CASE_MATHFN (SIGNIFICAND) \
2241 CASE_MATHFN (SIN) \
2242 CASE_MATHFN (SINCOS) \
2243 CASE_MATHFN (SINH) \
2244 CASE_MATHFN_FLOATN (SQRT) \
2245 CASE_MATHFN (TAN) \
2246 CASE_MATHFN (TANH) \
2247 CASE_MATHFN (TGAMMA) \
2248 CASE_MATHFN_FLOATN (TRUNC) \
2249 CASE_MATHFN (Y0) \
2250 CASE_MATHFN (Y1) \
2251 CASE_MATHFN (YN)
2253 SEQ_OF_CASE_MATHFN
2255 default:
2256 return END_BUILTINS;
2259 mtype = TYPE_MAIN_VARIANT (type);
2260 if (mtype == double_type_node)
2261 return fcode;
2262 else if (mtype == float_type_node)
2263 return fcodef;
2264 else if (mtype == long_double_type_node)
2265 return fcodel;
2266 else if (mtype == float16_type_node)
2267 return fcodef16;
2268 else if (mtype == float32_type_node)
2269 return fcodef32;
2270 else if (mtype == float64_type_node)
2271 return fcodef64;
2272 else if (mtype == float128_type_node)
2273 return fcodef128;
2274 else if (mtype == float32x_type_node)
2275 return fcodef32x;
2276 else if (mtype == float64x_type_node)
2277 return fcodef64x;
2278 else if (mtype == float128x_type_node)
2279 return fcodef128x;
2280 else
2281 return END_BUILTINS;
2284 #undef CASE_MATHFN
2285 #undef CASE_MATHFN_FLOATN
2286 #undef CASE_MATHFN_REENT
2288 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2289 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2290 otherwise use the explicit declaration. If we can't do the conversion,
2291 return null. */
2293 static tree
2294 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2296 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2297 if (fcode2 == END_BUILTINS)
2298 return NULL_TREE;
2300 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2301 return NULL_TREE;
2303 return builtin_decl_explicit (fcode2);
2306 /* Like mathfn_built_in_1, but always use the implicit array. */
2308 tree
2309 mathfn_built_in (tree type, combined_fn fn)
2311 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2314 /* Like mathfn_built_in_1, but take a built_in_function and
2315 always use the implicit array. */
2317 tree
2318 mathfn_built_in (tree type, enum built_in_function fn)
2320 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
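/* Usage sketch (illustrative only): a pass that has narrowed a double
   computation to float can retarget the math call with

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the decl for sqrtf, or NULL_TREE when the float variant
   is not implicitly available.  */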
2323 /* Return the type associated with a built-in function, i.e., the one
2324 to be passed to mathfn_built_in to get the type-specific
2325 function. */
2327 tree
2328 mathfn_built_in_type (combined_fn fn)
2330 #define CASE_MATHFN(MATHFN) \
2331 case BUILT_IN_##MATHFN: \
2332 return double_type_node; \
2333 case BUILT_IN_##MATHFN##F: \
2334 return float_type_node; \
2335 case BUILT_IN_##MATHFN##L: \
2336 return long_double_type_node;
2338 #define CASE_MATHFN_FLOATN(MATHFN) \
2339 CASE_MATHFN(MATHFN) \
2340 case BUILT_IN_##MATHFN##F16: \
2341 return float16_type_node; \
2342 case BUILT_IN_##MATHFN##F32: \
2343 return float32_type_node; \
2344 case BUILT_IN_##MATHFN##F64: \
2345 return float64_type_node; \
2346 case BUILT_IN_##MATHFN##F128: \
2347 return float128_type_node; \
2348 case BUILT_IN_##MATHFN##F32X: \
2349 return float32x_type_node; \
2350 case BUILT_IN_##MATHFN##F64X: \
2351 return float64x_type_node; \
2352 case BUILT_IN_##MATHFN##F128X: \
2353 return float128x_type_node;
2355 /* Similar to above, but appends _R after any F/L suffix. */
2356 #define CASE_MATHFN_REENT(MATHFN) \
2357 case BUILT_IN_##MATHFN##_R: \
2358 return double_type_node; \
2359 case BUILT_IN_##MATHFN##F_R: \
2360 return float_type_node; \
2361 case BUILT_IN_##MATHFN##L_R: \
2362 return long_double_type_node;
2364 switch (fn)
2366 SEQ_OF_CASE_MATHFN
2368 default:
2369 return NULL_TREE;
2372 #undef CASE_MATHFN
2373 #undef CASE_MATHFN_FLOATN
2374 #undef CASE_MATHFN_REENT
2375 #undef SEQ_OF_CASE_MATHFN
2378 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2379 return its code, otherwise return IFN_LAST. Note that this function
2380 only tests whether the function is defined in internal-fn.def, not whether
2381 it is actually available on the target. */
2383 internal_fn
2384 associated_internal_fn (tree fndecl)
2386 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2387 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2388 switch (DECL_FUNCTION_CODE (fndecl))
2390 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2391 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2392 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2393 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2394 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2395 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2396 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2397 #include "internal-fn.def"
2399 CASE_FLT_FN (BUILT_IN_POW10):
2400 return IFN_EXP10;
2402 CASE_FLT_FN (BUILT_IN_DREM):
2403 return IFN_REMAINDER;
2405 CASE_FLT_FN (BUILT_IN_SCALBN):
2406 CASE_FLT_FN (BUILT_IN_SCALBLN):
2407 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2408 return IFN_LDEXP;
2409 return IFN_LAST;
2411 default:
2412 return IFN_LAST;
2416 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2417 on the current target by a call to an internal function, return the
2418 code of that internal function, otherwise return IFN_LAST. The caller
2419 is responsible for ensuring that any side-effects of the built-in
2420 call are dealt with correctly. E.g. if CALL sets errno, the caller
2421 must decide that the errno result isn't needed or make it available
2422 in some other way. */
2424 internal_fn
2425 replacement_internal_fn (gcall *call)
2427 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2429 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2430 if (ifn != IFN_LAST)
2432 tree_pair types = direct_internal_fn_types (ifn, call);
2433 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2434 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2435 return ifn;
2438 return IFN_LAST;
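/* For example (an illustrative sketch): given the GIMPLE statement

     _2 = __builtin_sqrtf (_1);

   this returns IFN_SQRT when the target implements the sqrt optab for
   SFmode, allowing the call to be rewritten as the internal call

     _2 = .SQRT (_1);

   which needs no library fallback.  */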
2441 /* Expand a call to the builtin trinary math functions (fma).
2442 Return NULL_RTX if a normal call should be emitted rather than expanding the
2443 function in-line. EXP is the expression that is a call to the builtin
2444 function; if convenient, the result should be placed in TARGET.
2445 SUBTARGET may be used as the target for computing one of EXP's
2446 operands. */
2448 static rtx
2449 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2451 optab builtin_optab;
2452 rtx op0, op1, op2, result;
2453 rtx_insn *insns;
2454 tree fndecl = get_callee_fndecl (exp);
2455 tree arg0, arg1, arg2;
2456 machine_mode mode;
2458 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2461 arg0 = CALL_EXPR_ARG (exp, 0);
2462 arg1 = CALL_EXPR_ARG (exp, 1);
2463 arg2 = CALL_EXPR_ARG (exp, 2);
2465 switch (DECL_FUNCTION_CODE (fndecl))
2467 CASE_FLT_FN (BUILT_IN_FMA):
2468 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2469 builtin_optab = fma_optab; break;
2470 default:
2471 gcc_unreachable ();
2474 /* Make a suitable register to place result in. */
2475 mode = TYPE_MODE (TREE_TYPE (exp));
2477 /* Before working hard, check whether the instruction is available. */
2478 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2479 return NULL_RTX;
2481 result = gen_reg_rtx (mode);
2483 /* Always stabilize the argument list. */
2484 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2485 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2486 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2488 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2489 op1 = expand_normal (arg1);
2490 op2 = expand_normal (arg2);
2492 start_sequence ();
2494 /* Compute into RESULT.
2495 Set RESULT to wherever the result comes back. */
2496 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2497 result, 0);
2499 /* If we were unable to expand via the builtin, stop the sequence
2500 (without outputting the insns) and call the library function
2501 with the stabilized argument list. */
2502 if (result == 0)
2504 end_sequence ();
2505 return expand_call (exp, target, target == const0_rtx);
2508 /* Output the entire sequence. */
2509 insns = get_insns ();
2510 end_sequence ();
2511 emit_insn (insns);
2513 return result;
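/* Illustration (assumes the target capability): on a machine with a fused
   multiply-add pattern for SFmode, a call __builtin_fmaf (a, b, c) is
   expanded through fma_optab into a single insn computing a * b + c with
   one rounding step, rather than a call to the fmaf library routine.  */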
2516 /* Expand a call to the builtin sin and cos math functions.
2517 Return NULL_RTX if a normal call should be emitted rather than expanding the
2518 function in-line. EXP is the expression that is a call to the builtin
2519 function; if convenient, the result should be placed in TARGET.
2520 SUBTARGET may be used as the target for computing one of EXP's
2521 operands. */
2523 static rtx
2524 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2526 optab builtin_optab;
2527 rtx op0;
2528 rtx_insn *insns;
2529 tree fndecl = get_callee_fndecl (exp);
2530 machine_mode mode;
2531 tree arg;
2533 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2534 return NULL_RTX;
2536 arg = CALL_EXPR_ARG (exp, 0);
2538 switch (DECL_FUNCTION_CODE (fndecl))
2540 CASE_FLT_FN (BUILT_IN_SIN):
2541 CASE_FLT_FN (BUILT_IN_COS):
2542 builtin_optab = sincos_optab; break;
2543 default:
2544 gcc_unreachable ();
2547 /* Make a suitable register to place result in. */
2548 mode = TYPE_MODE (TREE_TYPE (exp));
2550 /* Check if the sincos insn is available; otherwise fall back
2551 to the sin or cos insn. */
2552 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2553 switch (DECL_FUNCTION_CODE (fndecl))
2555 CASE_FLT_FN (BUILT_IN_SIN):
2556 builtin_optab = sin_optab; break;
2557 CASE_FLT_FN (BUILT_IN_COS):
2558 builtin_optab = cos_optab; break;
2559 default:
2560 gcc_unreachable ();
2563 /* Before working hard, check whether the instruction is available. */
2564 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2566 rtx result = gen_reg_rtx (mode);
2568 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2569 need to expand the argument again. This way, we will not perform
2570 side-effects more than once. */
2571 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2573 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2575 start_sequence ();
2577 /* Compute into RESULT.
2578 Set RESULT to wherever the result comes back. */
2579 if (builtin_optab == sincos_optab)
2581 int ok;
2583 switch (DECL_FUNCTION_CODE (fndecl))
2585 CASE_FLT_FN (BUILT_IN_SIN):
2586 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2587 break;
2588 CASE_FLT_FN (BUILT_IN_COS):
2589 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2590 break;
2591 default:
2592 gcc_unreachable ();
2594 gcc_assert (ok);
2596 else
2597 result = expand_unop (mode, builtin_optab, op0, result, 0);
2599 if (result != 0)
2601 /* Output the entire sequence. */
2602 insns = get_insns ();
2603 end_sequence ();
2604 emit_insn (insns);
2605 return result;
2608 /* If we were unable to expand via the builtin, stop the sequence
2609 (without outputting the insns) and call the library function
2610 with the stabilized argument list. */
2611 end_sequence ();
2614 return expand_call (exp, target, target == const0_rtx);
2617 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2618 return an RTL instruction code that implements the functionality.
2619 If that isn't possible or available return CODE_FOR_nothing. */
2621 static enum insn_code
2622 interclass_mathfn_icode (tree arg, tree fndecl)
2624 bool errno_set = false;
2625 optab builtin_optab = unknown_optab;
2626 machine_mode mode;
2628 switch (DECL_FUNCTION_CODE (fndecl))
2630 CASE_FLT_FN (BUILT_IN_ILOGB):
2631 errno_set = true; builtin_optab = ilogb_optab; break;
2632 CASE_FLT_FN (BUILT_IN_ISINF):
2633 builtin_optab = isinf_optab; break;
2634 case BUILT_IN_ISNORMAL:
2635 case BUILT_IN_ISFINITE:
2636 CASE_FLT_FN (BUILT_IN_FINITE):
2637 case BUILT_IN_FINITED32:
2638 case BUILT_IN_FINITED64:
2639 case BUILT_IN_FINITED128:
2640 case BUILT_IN_ISINFD32:
2641 case BUILT_IN_ISINFD64:
2642 case BUILT_IN_ISINFD128:
2643 /* These builtins have no optabs (yet). */
2644 break;
2645 default:
2646 gcc_unreachable ();
2649 /* There's no easy way to detect the case we need to set EDOM. */
2650 if (flag_errno_math && errno_set)
2651 return CODE_FOR_nothing;
2653 /* Optab mode depends on the mode of the input argument. */
2654 mode = TYPE_MODE (TREE_TYPE (arg));
2656 if (builtin_optab)
2657 return optab_handler (builtin_optab, mode);
2658 return CODE_FOR_nothing;
2661 /* Expand a call to one of the builtin math functions that operate on
2662 a floating-point argument and output an integer result (ilogb, isinf,
2663 isnan, etc).
2664 Return 0 if a normal call should be emitted rather than expanding the
2665 function in-line. EXP is the expression that is a call to the builtin
2666 function; if convenient, the result should be placed in TARGET. */
2668 static rtx
2669 expand_builtin_interclass_mathfn (tree exp, rtx target)
2671 enum insn_code icode = CODE_FOR_nothing;
2672 rtx op0;
2673 tree fndecl = get_callee_fndecl (exp);
2674 machine_mode mode;
2675 tree arg;
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2678 return NULL_RTX;
2680 arg = CALL_EXPR_ARG (exp, 0);
2681 icode = interclass_mathfn_icode (arg, fndecl);
2682 mode = TYPE_MODE (TREE_TYPE (arg));
2684 if (icode != CODE_FOR_nothing)
2686 class expand_operand ops[1];
2687 rtx_insn *last = get_last_insn ();
2688 tree orig_arg = arg;
2690 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2691 need to expand the argument again. This way, we will not perform
2692 side-effects more than once. */
2693 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2695 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2697 if (mode != GET_MODE (op0))
2698 op0 = convert_to_mode (mode, op0, 0);
2700 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2701 if (maybe_legitimize_operands (icode, 0, 1, ops)
2702 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2703 return ops[0].value;
2705 delete_insns_since (last);
2706 CALL_EXPR_ARG (exp, 0) = orig_arg;
2709 return NULL_RTX;
2712 /* Expand a call to the builtin sincos math function.
2713 Return NULL_RTX if a normal call should be emitted rather than expanding the
2714 function in-line. EXP is the expression that is a call to the builtin
2715 function. */
2717 static rtx
2718 expand_builtin_sincos (tree exp)
2720 rtx op0, op1, op2, target1, target2;
2721 machine_mode mode;
2722 tree arg, sinp, cosp;
2723 int result;
2724 location_t loc = EXPR_LOCATION (exp);
2725 tree alias_type, alias_off;
2727 if (!validate_arglist (exp, REAL_TYPE,
2728 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2729 return NULL_RTX;
2731 arg = CALL_EXPR_ARG (exp, 0);
2732 sinp = CALL_EXPR_ARG (exp, 1);
2733 cosp = CALL_EXPR_ARG (exp, 2);
2735 /* Make a suitable register to place result in. */
2736 mode = TYPE_MODE (TREE_TYPE (arg));
2738 /* Check if sincos insn is available, otherwise emit the call. */
2739 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2740 return NULL_RTX;
2742 target1 = gen_reg_rtx (mode);
2743 target2 = gen_reg_rtx (mode);
2745 op0 = expand_normal (arg);
2746 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2747 alias_off = build_int_cst (alias_type, 0);
2748 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2749 sinp, alias_off));
2750 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2751 cosp, alias_off));
2753 /* Compute into target1 and target2.
2754 Set TARGET to wherever the result comes back. */
2755 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2756 gcc_assert (result);
2758 /* Move target1 and target2 to the memory locations indicated
2759 by op1 and op2. */
2760 emit_move_insn (op1, target1);
2761 emit_move_insn (op2, target2);
2763 return const0_rtx;
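/* Sketch of the effect (illustrative only): when the sincos insn exists
   for the argument's mode, a call

     sincos (x, &s, &c);

   expands to one two-output insn computing both values, followed by
   stores through the SINP and COSP pointers; otherwise NULL_RTX is
   returned above and a normal library call is emitted instead.  */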
2766 /* Expand a call to the internal cexpi builtin to the sincos math function.
2767 EXP is the expression that is a call to the builtin function; if convenient,
2768 the result should be placed in TARGET. */
2770 static rtx
2771 expand_builtin_cexpi (tree exp, rtx target)
2773 tree fndecl = get_callee_fndecl (exp);
2774 tree arg, type;
2775 machine_mode mode;
2776 rtx op0, op1, op2;
2777 location_t loc = EXPR_LOCATION (exp);
2779 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2780 return NULL_RTX;
2782 arg = CALL_EXPR_ARG (exp, 0);
2783 type = TREE_TYPE (arg);
2784 mode = TYPE_MODE (TREE_TYPE (arg));
2786 /* Try expanding via a sincos optab; fall back to emitting a libcall
2787 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2788 is only generated from sincos or cexp, or if we have either of them. */
2789 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2791 op1 = gen_reg_rtx (mode);
2792 op2 = gen_reg_rtx (mode);
2794 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2796 /* Compute into op1 and op2. */
2797 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2799 else if (targetm.libc_has_function (function_sincos, type))
2801 tree call, fn = NULL_TREE;
2802 tree top1, top2;
2803 rtx op1a, op2a;
2805 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2806 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2807 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2808 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2809 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2810 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2811 else
2812 gcc_unreachable ();
2814 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2815 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2816 op1a = copy_addr_to_reg (XEXP (op1, 0));
2817 op2a = copy_addr_to_reg (XEXP (op2, 0));
2818 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2819 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2821 /* Make sure not to fold the sincos call again. */
2822 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2823 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2824 call, 3, arg, top1, top2));
2826 else
2828 tree call, fn = NULL_TREE, narg;
2829 tree ctype = build_complex_type (type);
2831 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2832 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2833 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2834 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2835 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2836 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2837 else
2838 gcc_unreachable ();
2840 /* If we don't have a decl for cexp, create one. This is the
2841 friendliest fallback if the user calls __builtin_cexpi
2842 without full target C99 function support. */
2843 if (fn == NULL_TREE)
2845 tree fntype;
2846 const char *name = NULL;
2848 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2849 name = "cexpf";
2850 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2851 name = "cexp";
2852 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2853 name = "cexpl";
2855 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2856 fn = build_fn_decl (name, fntype);
2859 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2860 build_real (type, dconst0), arg);
2862 /* Make sure not to fold the cexp call again. */
2863 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2864 return expand_expr (build_call_nary (ctype, call, 1, narg),
2865 target, VOIDmode, EXPAND_NORMAL);
2868 /* Now build the proper return type. */
2869 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2870 make_tree (TREE_TYPE (arg), op2),
2871 make_tree (TREE_TYPE (arg), op1)),
2872 target, VOIDmode, EXPAND_NORMAL);
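/* Lowering sketch for the libcall path above (illustrative only):

     _Complex float r = __builtin_cexpif (x);

   becomes, in effect,

     float s, c;
     sincosf (x, &s, &c);
     ... r = c + i*s ...

   i.e. the real part receives cos and the imaginary part sin, matching
   the COMPLEX_EXPR built from op2 and op1 above.  */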
2875 /* Conveniently construct a function call expression. FNDECL names the
2876 function to be called, N is the number of arguments, and the "..."
2877 parameters are the argument expressions. Unlike build_call_expr
2878 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2880 static tree
2881 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2883 va_list ap;
2884 tree fntype = TREE_TYPE (fndecl);
2885 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2887 va_start (ap, n);
2888 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2889 va_end (ap);
2890 SET_EXPR_LOCATION (fn, loc);
2891 return fn;
2894 /* Expand a call to one of the builtin rounding functions gcc defines
2895 as an extension (lfloor and lceil). As these are gcc extensions we
2896 do not need to worry about setting errno to EDOM.
2897 If expanding via optab fails, lower expression to (int)(floor(x)).
2898 EXP is the expression that is a call to the builtin function;
2899 if convenient, the result should be placed in TARGET. */
2901 static rtx
2902 expand_builtin_int_roundingfn (tree exp, rtx target)
2904 convert_optab builtin_optab;
2905 rtx op0, tmp;
2906 rtx_insn *insns;
2907 tree fndecl = get_callee_fndecl (exp);
2908 enum built_in_function fallback_fn;
2909 tree fallback_fndecl;
2910 machine_mode mode;
2911 tree arg;
2913 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2914 return NULL_RTX;
2916 arg = CALL_EXPR_ARG (exp, 0);
2918 switch (DECL_FUNCTION_CODE (fndecl))
2920 CASE_FLT_FN (BUILT_IN_ICEIL):
2921 CASE_FLT_FN (BUILT_IN_LCEIL):
2922 CASE_FLT_FN (BUILT_IN_LLCEIL):
2923 builtin_optab = lceil_optab;
2924 fallback_fn = BUILT_IN_CEIL;
2925 break;
2927 CASE_FLT_FN (BUILT_IN_IFLOOR):
2928 CASE_FLT_FN (BUILT_IN_LFLOOR):
2929 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2930 builtin_optab = lfloor_optab;
2931 fallback_fn = BUILT_IN_FLOOR;
2932 break;
2934 default:
2935 gcc_unreachable ();
2938 /* Make a suitable register to place result in. */
2939 mode = TYPE_MODE (TREE_TYPE (exp));
2941 target = gen_reg_rtx (mode);
2943 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2944 need to expand the argument again. This way, we will not perform
2945 side-effects more than once. */
2946 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2948 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2950 start_sequence ();
2952 /* Compute into TARGET. */
2953 if (expand_sfix_optab (target, op0, builtin_optab))
2955 /* Output the entire sequence. */
2956 insns = get_insns ();
2957 end_sequence ();
2958 emit_insn (insns);
2959 return target;
2962 /* If we were unable to expand via the builtin, stop the sequence
2963 (without outputting the insns). */
2964 end_sequence ();
2966 /* Fall back to floating point rounding optab. */
2967 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2969 /* For non-C99 targets we may end up without a fallback fndecl here
2970 if the user called __builtin_lfloor directly. In this case emit
2971 a call to the floor/ceil variants nevertheless. This should result
2972 in the best user experience for targets that are not fully C99. */
2973 if (fallback_fndecl == NULL_TREE)
2975 tree fntype;
2976 const char *name = NULL;
2978 switch (DECL_FUNCTION_CODE (fndecl))
2980 case BUILT_IN_ICEIL:
2981 case BUILT_IN_LCEIL:
2982 case BUILT_IN_LLCEIL:
2983 name = "ceil";
2984 break;
2985 case BUILT_IN_ICEILF:
2986 case BUILT_IN_LCEILF:
2987 case BUILT_IN_LLCEILF:
2988 name = "ceilf";
2989 break;
2990 case BUILT_IN_ICEILL:
2991 case BUILT_IN_LCEILL:
2992 case BUILT_IN_LLCEILL:
2993 name = "ceill";
2994 break;
2995 case BUILT_IN_IFLOOR:
2996 case BUILT_IN_LFLOOR:
2997 case BUILT_IN_LLFLOOR:
2998 name = "floor";
2999 break;
3000 case BUILT_IN_IFLOORF:
3001 case BUILT_IN_LFLOORF:
3002 case BUILT_IN_LLFLOORF:
3003 name = "floorf";
3004 break;
3005 case BUILT_IN_IFLOORL:
3006 case BUILT_IN_LFLOORL:
3007 case BUILT_IN_LLFLOORL:
3008 name = "floorl";
3009 break;
3010 default:
3011 gcc_unreachable ();
3014 fntype = build_function_type_list (TREE_TYPE (arg),
3015 TREE_TYPE (arg), NULL_TREE);
3016 fallback_fndecl = build_fn_decl (name, fntype);
3019 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3021 tmp = expand_normal (exp);
3022 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3024 /* Truncate the result of the floating point optab to integer
3025 via expand_fix (). */
3026 target = gen_reg_rtx (mode);
3027 expand_fix (target, tmp, 0);
3029 return target;
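/* Illustration (assumed non-C99 target): when lfloor_optab has no handler
   and no implicit lfloor fallback exists, a call

     long l = __builtin_lfloor (x);

   is rewritten via the code above into a call to plain floor whose
   result is then converted with expand_fix, i.e. (long) floor (x).  */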
3032 /* Expand a call to one of the builtin math functions doing integer
3033 conversion (lrint).
3034 Return 0 if a normal call should be emitted rather than expanding the
3035 function in-line. EXP is the expression that is a call to the builtin
3036 function; if convenient, the result should be placed in TARGET. */
3038 static rtx
3039 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3041 convert_optab builtin_optab;
3042 rtx op0;
3043 rtx_insn *insns;
3044 tree fndecl = get_callee_fndecl (exp);
3045 tree arg;
3046 machine_mode mode;
3047 enum built_in_function fallback_fn = BUILT_IN_NONE;
3049 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3050 return NULL_RTX;
3052 arg = CALL_EXPR_ARG (exp, 0);
3054 switch (DECL_FUNCTION_CODE (fndecl))
3056 CASE_FLT_FN (BUILT_IN_IRINT):
3057 fallback_fn = BUILT_IN_LRINT;
3058 gcc_fallthrough ();
3059 CASE_FLT_FN (BUILT_IN_LRINT):
3060 CASE_FLT_FN (BUILT_IN_LLRINT):
3061 builtin_optab = lrint_optab;
3062 break;
3064 CASE_FLT_FN (BUILT_IN_IROUND):
3065 fallback_fn = BUILT_IN_LROUND;
3066 gcc_fallthrough ();
3067 CASE_FLT_FN (BUILT_IN_LROUND):
3068 CASE_FLT_FN (BUILT_IN_LLROUND):
3069 builtin_optab = lround_optab;
3070 break;
3072 default:
3073 gcc_unreachable ();
3076 /* There's no easy way to detect the case we need to set EDOM. */
3077 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3078 return NULL_RTX;
3080 /* Make a suitable register to place result in. */
3081 mode = TYPE_MODE (TREE_TYPE (exp));
3083 /* There's no easy way to detect the case we need to set EDOM. */
3084 if (!flag_errno_math)
3086 rtx result = gen_reg_rtx (mode);
3088 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3089 need to expand the argument again. This way, we will not perform
3090 side-effects more than once. */
3091 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3093 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3095 start_sequence ();
3097 if (expand_sfix_optab (result, op0, builtin_optab))
3099 /* Output the entire sequence. */
3100 insns = get_insns ();
3101 end_sequence ();
3102 emit_insn (insns);
3103 return result;
3106 /* If we were unable to expand via the builtin, stop the sequence
3107 (without outputting the insns) and call the library function
3108 with the stabilized argument list. */
3109 end_sequence ();
3112 if (fallback_fn != BUILT_IN_NONE)
3114 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3115 targets, (int) round (x) should never be transformed into
3116 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3117 a call to lround in the hope that the target provides at least some
3118 C99 functions. This should result in the best user experience for
3119 targets that are not fully C99. */
3120 tree fallback_fndecl = mathfn_built_in_1
3121 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3123 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3124 fallback_fndecl, 1, arg);
3126 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3127 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3128 return convert_to_mode (mode, target, 0);
3131 return expand_call (exp, target, target == const0_rtx);
3134 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3135 a normal call should be emitted rather than expanding the function
3136 in-line. EXP is the expression that is a call to the builtin
3137 function; if convenient, the result should be placed in TARGET. */
3139 static rtx
3140 expand_builtin_powi (tree exp, rtx target)
3142 tree arg0, arg1;
3143 rtx op0, op1;
3144 machine_mode mode;
3145 machine_mode mode2;
3147 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3148 return NULL_RTX;
3150 arg0 = CALL_EXPR_ARG (exp, 0);
3151 arg1 = CALL_EXPR_ARG (exp, 1);
3152 mode = TYPE_MODE (TREE_TYPE (exp));
3154 /* Emit a libcall to libgcc. */
3156 /* Mode of the 2nd argument must match that of an int. */
3157 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3159 if (target == NULL_RTX)
3160 target = gen_reg_rtx (mode);
3162 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3163 if (GET_MODE (op0) != mode)
3164 op0 = convert_to_mode (mode, op0, 0);
3165 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3166 if (GET_MODE (op1) != mode2)
3167 op1 = convert_to_mode (mode2, op1, 0);
3169 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3170 target, LCT_CONST, mode,
3171 op0, mode, op1, mode2);
3173 return target;
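/* Illustration: for DFmode the libcall emitted above resolves to libgcc's

     double __powidf2 (double x, int n);

   so __builtin_powi (x, n) ends up as an ordinary library call when the
   middle end did not already expand the powi into multiplications.  */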
3176 /* Expand expression EXP which is a call to the strlen builtin. Return
3177 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3178 try to get the result in TARGET, if convenient. */
3180 static rtx
3181 expand_builtin_strlen (tree exp, rtx target,
3182 machine_mode target_mode)
3184 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3185 return NULL_RTX;
3187 tree src = CALL_EXPR_ARG (exp, 0);
3188 if (!check_read_access (exp, src))
3189 return NULL_RTX;
3191 /* If the length can be computed at compile-time, return it. */
3192 if (tree len = c_strlen (src, 0))
3193 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3195 /* If the length can be computed at compile-time and is a constant
3196 integer, but there are side-effects in src, evaluate
3197 src for side-effects, then return len.
3198 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3199 can be optimized into: i++; x = 3; */
3200 tree len = c_strlen (src, 1);
3201 if (len && TREE_CODE (len) == INTEGER_CST)
3203 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3204 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3207 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3209 /* If SRC is not a pointer type, don't do this operation inline. */
3210 if (align == 0)
3211 return NULL_RTX;
3213 /* Bail out if we can't compute strlen in the right mode. */
3214 machine_mode insn_mode;
3215 enum insn_code icode = CODE_FOR_nothing;
3216 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3218 icode = optab_handler (strlen_optab, insn_mode);
3219 if (icode != CODE_FOR_nothing)
3220 break;
3222 if (insn_mode == VOIDmode)
3223 return NULL_RTX;
3225 /* Make a place to hold the source address. We will not expand
3226 the actual source until we are sure that the expansion will
3227 not fail -- there are trees that cannot be expanded twice. */
3228 rtx src_reg = gen_reg_rtx (Pmode);
3230 /* Mark the beginning of the strlen sequence so we can emit the
3231 source operand later. */
3232 rtx_insn *before_strlen = get_last_insn ();
3234 class expand_operand ops[4];
3235 create_output_operand (&ops[0], target, insn_mode);
3236 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3237 create_integer_operand (&ops[2], 0);
3238 create_integer_operand (&ops[3], align);
3239 if (!maybe_expand_insn (icode, 4, ops))
3240 return NULL_RTX;
3242 /* Check to see if the argument was declared attribute nonstring
3243 and if so, issue a warning since at this point it's not known
3244 to be nul-terminated. */
3245 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3247 /* Now that we are assured of success, expand the source. */
3248 start_sequence ();
3249 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3250 if (pat != src_reg)
3252 #ifdef POINTERS_EXTEND_UNSIGNED
3253 if (GET_MODE (pat) != Pmode)
3254 pat = convert_to_mode (Pmode, pat,
3255 POINTERS_EXTEND_UNSIGNED);
3256 #endif
3257 emit_move_insn (src_reg, pat);
3259 pat = get_insns ();
3260 end_sequence ();
3262 if (before_strlen)
3263 emit_insn_after (pat, before_strlen);
3264 else
3265 emit_insn_before (pat, get_insns ());
3267 /* Return the value in the proper mode for this function. */
3268 if (GET_MODE (ops[0].value) == target_mode)
3269 target = ops[0].value;
3270 else if (target != 0)
3271 convert_move (target, ops[0].value, 0);
3272 else
3273 target = convert_to_mode (target_mode, ops[0].value, 0);
3275 return target;
3278 /* Expand call EXP to the strnlen built-in, returning the result
3279 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3281 static rtx
3282 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3284 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3285 return NULL_RTX;
3287 tree src = CALL_EXPR_ARG (exp, 0);
3288 tree bound = CALL_EXPR_ARG (exp, 1);
3290 if (!bound)
3291 return NULL_RTX;
3293 check_read_access (exp, src, bound);
3295 location_t loc = UNKNOWN_LOCATION;
3296 if (EXPR_HAS_LOCATION (exp))
3297 loc = EXPR_LOCATION (exp);
3299 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3300 so these conversions aren't necessary. */
3301 c_strlen_data lendata = { };
3302 tree len = c_strlen (src, 0, &lendata, 1);
3303 if (len)
3304 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3306 if (TREE_CODE (bound) == INTEGER_CST)
3308 if (!len)
3309 return NULL_RTX;
3311 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3312 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3315 if (TREE_CODE (bound) != SSA_NAME)
3316 return NULL_RTX;
3318 wide_int min, max;
3319 enum value_range_kind rng = get_range_info (bound, &min, &max);
3320 if (rng != VR_RANGE)
3321 return NULL_RTX;
3323 if (!len || TREE_CODE (len) != INTEGER_CST)
3325 bool exact;
3326 lendata.decl = unterminated_array (src, &len, &exact);
3327 if (!lendata.decl)
3328 return NULL_RTX;
3331 if (lendata.decl)
3332 return NULL_RTX;
3334 if (wi::gtu_p (min, wi::to_wide (len)))
3335 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3337 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3338 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3341 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3342 bytes from the buffer at DATA + OFFSET and return them reinterpreted as
3343 a target constant. */
3345 static rtx
3346 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3347 scalar_int_mode mode)
3349 /* The REPresentation pointed to by DATA need not be a nul-terminated
3350 string but the caller guarantees it's large enough for MODE. */
3351 const char *rep = (const char *) data;
3353 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3356 /* LEN specifies the length of the block for the memcpy/memset operation.
3357 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3358 In some cases we can make a very likely guess about the maximum size,
3359 which we then store in PROBABLE_MAX_SIZE. */
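/* A worked example (assumed value-range data): in

     if (n >= 4 && n <= 1024)
       memcpy (a, b, n);

   the SSA name for N carries the range [4, 1024], so *MIN_SIZE becomes 4
   and *MAX_SIZE and *PROBABLE_MAX_SIZE become 1024, letting the block-move
   expander choose a strategy for that size bucket.  */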
3361 static void
3362 determine_block_size (tree len, rtx len_rtx,
3363 unsigned HOST_WIDE_INT *min_size,
3364 unsigned HOST_WIDE_INT *max_size,
3365 unsigned HOST_WIDE_INT *probable_max_size)
3367 if (CONST_INT_P (len_rtx))
3369 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3370 return;
3372 else
3374 wide_int min, max;
3375 enum value_range_kind range_type = VR_UNDEFINED;
3377 /* Determine bounds from the type. */
3378 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3379 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3380 else
3381 *min_size = 0;
3382 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3383 *probable_max_size = *max_size
3384 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3385 else
3386 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3388 if (TREE_CODE (len) == SSA_NAME)
3389 range_type = get_range_info (len, &min, &max);
3390 if (range_type == VR_RANGE)
3392 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3393 *min_size = min.to_uhwi ();
3394 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3395 *probable_max_size = *max_size = max.to_uhwi ();
3397 else if (range_type == VR_ANTI_RANGE)
3399 /* Code like
3401 int n;
3402 if (n < 100)
3403 memcpy (a, b, n)
3405 produces an anti-range allowing negative values of N. We can
3406 still use the information and guess that N is not negative. */
3408 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3409 *probable_max_size = min.to_uhwi () - 1;
3412 gcc_checking_assert (*max_size <=
3413 (unsigned HOST_WIDE_INT)
3414 GET_MODE_MASK (GET_MODE (len_rtx)));
3417 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3418 accessing an object of SIZE bytes. */
3420 static bool
3421 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3422 tree bndrng[2], tree size, const access_data *pad = NULL)
3424 if (!bndrng[0] || TREE_NO_WARNING (exp))
3425 return false;
3427 tree maxobjsize = max_object_size ();
3429 bool warned = false;
3431 if (opt == OPT_Wstringop_overread)
3433 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3435 if (bndrng[0] == bndrng[1])
3436 warned = (func
3437 ? warning_at (loc, opt,
3438 "%K%qD specified bound %E "
3439 "exceeds maximum object size %E",
3440 exp, func, bndrng[0], maxobjsize)
3441 : warning_at (loc, opt,
3442 "%Kspecified bound %E "
3443 "exceeds maximum object size %E",
3444 exp, bndrng[0], maxobjsize));
3445 else
3446 warned = (func
3447 ? warning_at (loc, opt,
3448 "%K%qD specified bound [%E, %E] "
3449 "exceeds maximum object size %E",
3450 exp, func,
3451 bndrng[0], bndrng[1], maxobjsize)
3452 : warning_at (loc, opt,
3453 "%Kspecified bound [%E, %E] "
3454 "exceeds maximum object size %E",
3455 exp, bndrng[0], bndrng[1], maxobjsize));
3457 else if (!size || tree_int_cst_le (bndrng[0], size))
3458 return false;
3459 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3460 warned = (func
3461 ? warning_at (loc, opt,
3462 "%K%qD specified bound %E exceeds "
3463 "source size %E",
3464 exp, func, bndrng[0], size)
3465 : warning_at (loc, opt,
3466 "%Kspecified bound %E exceeds "
3467 "source size %E",
3468 exp, bndrng[0], size));
3469 else
3470 warned = (func
3471 ? warning_at (loc, opt,
3472 "%K%qD specified bound [%E, %E] exceeds "
3473 "source size %E",
3474 exp, func, bndrng[0], bndrng[1], size)
3475 : warning_at (loc, opt,
3476 "%Kspecified bound [%E, %E] exceeds "
3477 "source size %E",
3478 exp, bndrng[0], bndrng[1], size));
3479 if (warned)
3481 if (pad && pad->src.ref)
3483 if (DECL_P (pad->src.ref))
3484 inform (DECL_SOURCE_LOCATION (pad->src.ref),
3485 "source object declared here");
3486 else if (EXPR_HAS_LOCATION (pad->src.ref))
3487 inform (EXPR_LOCATION (pad->src.ref),
3488 "source object allocated here");
3490 TREE_NO_WARNING (exp) = true;
3493 return warned;
3496 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3498 if (bndrng[0] == bndrng[1])
3499 warned = (func
3500 ? warning_at (loc, opt,
3501 "%K%qD specified size %E "
3502 "exceeds maximum object size %E",
3503 exp, func, bndrng[0], maxobjsize)
3504 : warning_at (loc, opt,
3505 "%Kspecified size %E "
3506 "exceeds maximum object size %E",
3507 exp, bndrng[0], maxobjsize));
3508 else
3509 warned = (func
3510 ? warning_at (loc, opt,
3511 "%K%qD specified size between %E and %E "
3512 "exceeds maximum object size %E",
3513 exp, func,
3514 bndrng[0], bndrng[1], maxobjsize)
3515 : warning_at (loc, opt,
3516 "%Kspecified size between %E and %E "
3517 "exceeds maximum object size %E",
3518 exp, bndrng[0], bndrng[1], maxobjsize));
3520 else if (!size || tree_int_cst_le (bndrng[0], size))
3521 return false;
3522 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3523 warned = (func
3524 ? warning_at (loc, OPT_Wstringop_overflow_,
3525 "%K%qD specified bound %E exceeds "
3526 "destination size %E",
3527 exp, func, bndrng[0], size)
3528 : warning_at (loc, OPT_Wstringop_overflow_,
3529 "%Kspecified bound %E exceeds "
3530 "destination size %E",
3531 exp, bndrng[0], size));
3532 else
3533 warned = (func
3534 ? warning_at (loc, OPT_Wstringop_overflow_,
3535 "%K%qD specified bound [%E, %E] exceeds "
3536 "destination size %E",
3537 exp, func, bndrng[0], bndrng[1], size)
3538 : warning_at (loc, OPT_Wstringop_overflow_,
3539 "%Kspecified bound [%E, %E] exceeds "
3540 "destination size %E",
3541 exp, bndrng[0], bndrng[1], size));
3543 if (warned)
3545 if (pad && pad->dst.ref)
3547 if (DECL_P (pad->dst.ref))
3548 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
3549 "destination object declared here");
3550 else if (EXPR_HAS_LOCATION (pad->dst.ref))
3551 inform (EXPR_LOCATION (pad->dst.ref),
3552 "destination object allocated here");
3554 TREE_NO_WARNING (exp) = true;
3557 return warned;
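/* An editorial sketch of a call diagnosed by the function above
   (the array A is hypothetical user code; the wording is approximate):

     char a[4];
     strnlen (a, 8);

   Here BNDRNG is [8, 8] and SIZE is 4, so under -Wstringop-overread
   the function emits roughly "specified bound 8 exceeds source
   size 4".  */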
3560 /* For an expression EXP issue an access warning controlled by option OPT
3561 for an access in the RANGE of sizes to a region SIZE bytes in size.
3562 WRITE is true for a write access, READ for a read access, and neither
3563 for a call that may or may not perform an access but for which the
3564 range is expected to be valid.
3565 Returns true when a warning has been issued. */
3567 static bool
3568 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3569 tree size, bool write, bool read)
3571 bool warned = false;
3573 if (write && read)
3575 if (tree_int_cst_equal (range[0], range[1]))
3576 warned = (func
3577 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3578 "%K%qD accessing %E byte in a region "
3579 "of size %E",
3580 "%K%qD accessing %E bytes in a region "
3581 "of size %E",
3582 exp, func, range[0], size)
3583 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3584 "%Kaccessing %E byte in a region "
3585 "of size %E",
3586 "%Kaccessing %E bytes in a region "
3587 "of size %E",
3588 exp, range[0], size));
3589 else if (tree_int_cst_sign_bit (range[1]))
3591 /* Avoid printing the upper bound if it's invalid. */
3592 warned = (func
3593 ? warning_at (loc, opt,
3594 "%K%qD accessing %E or more bytes in "
3595 "a region of size %E",
3596 exp, func, range[0], size)
3597 : warning_at (loc, opt,
3598 "%Kaccessing %E or more bytes in "
3599 "a region of size %E",
3600 exp, range[0], size));
3602 else
3603 warned = (func
3604 ? warning_at (loc, opt,
3605 "%K%qD accessing between %E and %E bytes "
3606 "in a region of size %E",
3607 exp, func, range[0], range[1],
3608 size)
3609 : warning_at (loc, opt,
3610 "%Kaccessing between %E and %E bytes "
3611 "in a region of size %E",
3612 exp, range[0], range[1],
3613 size));
3614 return warned;
3617 if (write)
3619 if (tree_int_cst_equal (range[0], range[1]))
3620 warned = (func
3621 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3622 "%K%qD writing %E byte into a region "
3623 "of size %E overflows the destination",
3624 "%K%qD writing %E bytes into a region "
3625 "of size %E overflows the destination",
3626 exp, func, range[0], size)
3627 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3628 "%Kwriting %E byte into a region "
3629 "of size %E overflows the destination",
3630 "%Kwriting %E bytes into a region "
3631 "of size %E overflows the destination",
3632 exp, range[0], size));
3633 else if (tree_int_cst_sign_bit (range[1]))
3635 /* Avoid printing the upper bound if it's invalid. */
3636 warned = (func
3637 ? warning_at (loc, opt,
3638 "%K%qD writing %E or more bytes into "
3639 "a region of size %E overflows "
3640 "the destination",
3641 exp, func, range[0], size)
3642 : warning_at (loc, opt,
3643 "%Kwriting %E or more bytes into "
3644 "a region of size %E overflows "
3645 "the destination",
3646 exp, range[0], size));
3648 else
3649 warned = (func
3650 ? warning_at (loc, opt,
3651 "%K%qD writing between %E and %E bytes "
3652 "into a region of size %E overflows "
3653 "the destination",
3654 exp, func, range[0], range[1],
3655 size)
3656 : warning_at (loc, opt,
3657 "%Kwriting between %E and %E bytes "
3658 "into a region of size %E overflows "
3659 "the destination",
3660 exp, range[0], range[1],
3661 size));
3662 return warned;
3665 if (read)
3667 if (tree_int_cst_equal (range[0], range[1]))
3668 warned = (func
3669 ? warning_n (loc, OPT_Wstringop_overread,
3670 tree_to_uhwi (range[0]),
3671 "%K%qD reading %E byte from a region of size %E",
3672 "%K%qD reading %E bytes from a region of size %E", exp, func, range[0], size)
3673 : warning_n (loc, OPT_Wstringop_overread,
3674 tree_to_uhwi (range[0]),
3675 "%Kreading %E byte from a region of size %E",
3676 "%Kreading %E bytes from a region of size %E",
3677 exp, range[0], size));
3678 else if (tree_int_cst_sign_bit (range[1]))
3680 /* Avoid printing the upper bound if it's invalid. */
3681 warned = (func
3682 ? warning_at (loc, OPT_Wstringop_overread,
3683 "%K%qD reading %E or more bytes from "
3684 "a region of size %E",
3685 exp, func, range[0], size)
3686 : warning_at (loc, OPT_Wstringop_overread,
3687 "%Kreading %E or more bytes from a region "
3688 "of size %E",
3689 exp, range[0], size));
3691 else
3692 warned = (func
3693 ? warning_at (loc, OPT_Wstringop_overread,
3694 "%K%qD reading between %E and %E bytes from "
3695 "a region of size %E",
3696 exp, func, range[0], range[1], size)
3697 : warning_at (loc, OPT_Wstringop_overread,
3698 "%Kreading between %E and %E bytes from "
3699 "a region of size %E",
3700 exp, range[0], range[1], size));
3702 if (warned)
3703 TREE_NO_WARNING (exp) = true;
3705 return warned;
3708 if (tree_int_cst_equal (range[0], range[1]))
3710 warned = (func
3711 ? warning_n (loc, OPT_Wstringop_overread,
3712 tree_to_uhwi (range[0]),
3713 "%K%qD epecting %E byte in a region of size %E",
3714 "%K%qD expecting %E bytes in a region of size %E",
3715 exp, func, range[0], size)
3716 : warning_n (loc, OPT_Wstringop_overread,
3717 tree_to_uhwi (range[0]),
3718 "%Kexpecting %E byte in a region of size %E",
3719 "%Kexpecting %E bytes in a region of size %E",
3720 exp, range[0], size));
3721 else if (tree_int_cst_sign_bit (range[1]))
3723 /* Avoid printing the upper bound if it's invalid. */
3724 warned = (func
3725 ? warning_at (loc, OPT_Wstringop_overread,
3726 "%K%qD expecting %E or more bytes in a region "
3727 "of size %E",
3728 exp, func, range[0], size)
3729 : warning_at (loc, OPT_Wstringop_overread,
3730 "%Kexpecting %E or more bytes in a region "
3731 "of size %E",
3732 exp, range[0], size));
3734 else
3735 warned = (func
3736 ? warning_at (loc, OPT_Wstringop_overread,
3737 "%K%qD expecting between %E and %E bytes in "
3738 "a region of size %E",
3739 exp, func, range[0], range[1], size)
3740 : warning_at (loc, OPT_Wstringop_overread,
3741 "%Kexpectting between %E and %E bytes in "
3742 "a region of size %E",
3743 exp, range[0], range[1], size));
3745 if (warned)
3746 TREE_NO_WARNING (exp) = true;
3748 return warned;
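/* An editorial sketch of the write branch above (BUF is hypothetical
   user code):

     char buf[4];
     strcpy (buf, "overflow");   // stores 9 bytes including the nul

   RANGE is [9, 9] and SIZE is 4, so with WRITE set the function emits
   roughly "writing 9 bytes into a region of size 4 overflows the
   destination" under the option OPT.  */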
3751 /* Issue an informational message describing the target of an access REF.
3752 MODE identifies the kind of access (read, write, or both). */
3754 static void
3755 inform_access (const access_ref &ref, access_mode mode)
3757 if (!ref.ref)
3758 return;
3760 /* Convert offset range and avoid including a zero range since it isn't
3761 necessarily meaningful. */
3762 long long minoff = 0, maxoff = 0;
3763 if (wi::fits_shwi_p (ref.offrng[0])
3764 && wi::fits_shwi_p (ref.offrng[1]))
3766 minoff = ref.offrng[0].to_shwi ();
3767 maxoff = ref.offrng[1].to_shwi ();
3770 /* Convert size range and always include it since all sizes are
3771 meaningful. */
3772 unsigned long long minsize = 0, maxsize = 0;
3773 if (wi::fits_shwi_p (ref.sizrng[0])
3774 && wi::fits_shwi_p (ref.sizrng[1]))
3776 minsize = ref.sizrng[0].to_shwi ();
3777 maxsize = ref.sizrng[1].to_shwi ();
3780 char sizestr[80];
3781 location_t loc;
3782 tree allocfn = NULL_TREE;
3783 if (TREE_CODE (ref.ref) == SSA_NAME)
3785 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3786 gcc_assert (is_gimple_call (stmt));
3787 loc = gimple_location (stmt);
3788 allocfn = gimple_call_fndecl (stmt);
3789 if (!allocfn)
3790 /* Handle calls through pointers to functions. */
3791 allocfn = gimple_call_fn (stmt);
3793 /* SIZRNG doesn't necessarily have the same range as the allocation
3794 size determined by gimple_call_alloc_size (). */
3796 if (minsize == maxsize)
3797 sprintf (sizestr, "%llu", minsize);
3798 else
3799 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3802 else
3803 loc = DECL_SOURCE_LOCATION (ref.ref);
3805 if (mode == access_read_write || mode == access_write_only)
3807 if (DECL_P (ref.ref))
3809 if (minoff == maxoff)
3811 if (minoff == 0)
3812 inform (loc, "destination object %qD", ref.ref);
3813 else
3814 inform (loc, "at offset %lli into destination object %qD",
3815 minoff, ref.ref);
3817 else
3818 inform (loc, "at offset [%lli, %lli] into destination object %qD",
3819 minoff, maxoff, ref.ref);
3820 return;
3823 if (minoff == maxoff)
3825 if (minoff == 0)
3826 inform (loc, "destination object of size %s allocated by %qE",
3827 sizestr, allocfn);
3828 else
3829 inform (loc,
3830 "at offset %lli into destination object of size %s "
3831 "allocated by %qE", minoff, sizestr, allocfn);
3833 else
3834 inform (loc,
3835 "at offset [%lli, %lli] into destination object of size %s "
3836 "allocated by %qE",
3837 minoff, maxoff, sizestr, allocfn);
3839 return;
3842 if (DECL_P (ref.ref))
3844 if (minoff == maxoff)
3846 if (minoff == 0)
3847 inform (loc, "source object %qD", ref.ref);
3848 else
3849 inform (loc, "at offset %lli into source object %qD",
3850 minoff, ref.ref);
3852 else
3853 inform (loc, "at offset [%lli, %lli] into source object %qD",
3854 minoff, maxoff, ref.ref);
3855 return;
3858 if (minoff == maxoff)
3860 if (minoff == 0)
3861 inform (loc, "source object of size %s allocated by %qE",
3862 sizestr, allocfn);
3863 else
3864 inform (loc,
3865 "at offset %lli into source object of size %s "
3866 "allocated by %qE", minoff, sizestr, allocfn);
3868 else
3869 inform (loc,
3870 "at offset [%lli, %lli] into source object of size %s "
3871 "allocated by %qE",
3872 minoff, maxoff, sizestr, allocfn);
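/* An editorial sketch of the note printed above for a heap object
   (hypothetical user code):

     char *p = malloc (4);
     memset (p, 0, 8);

   REF is the SSA name of the malloc result, so after the overflow
   warning the function adds a note along the lines of "destination
   object of size 4 allocated by 'malloc'".  */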
3875 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
3876 by BNDRNG if nonnull and valid. */
3878 static void
3879 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
3881 if (bound)
3882 get_size_range (bound, range);
3884 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
3885 return;
3887 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
3889 offset_int r[] =
3890 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
3891 if (r[0] < bndrng[0])
3892 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
3893 if (bndrng[1] < r[1])
3894 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
3896 else
3898 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
3899 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
3903 /* Try to verify that the sizes and lengths of the arguments to a string
3904 manipulation function given by EXP are within valid bounds and that
3905 the operation does not lead to buffer overflow or read past the end.
3906 Arguments other than EXP may be null. When non-null, the arguments
3907 have the following meaning:
3908 DST is the destination of a copy call or NULL otherwise.
3909 SRC is the source of a copy call or NULL otherwise.
3910 DSTWRITE is the number of bytes written into the destination obtained
3911 from the user-supplied size argument to the function (such as in
3912 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3913 MAXREAD is the user-supplied bound on the length of the source sequence
3914 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3915 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3916 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3917 expression EXP is a string function call (as opposed to a memory call
3918 like memcpy). As an exception, SRCSTR can also be an integer denoting
3919 the precomputed size of the source string or object (for functions like
3920 memcpy).
3921 DSTSIZE is the size of the destination object.
3923 When DSTWRITE is null the length of the source sequence is checked
3924 to verify that it doesn't exceed SIZE_MAX.
3926 WRITE is true for write accesses, READ is true for reads. Both are
3927 false for simple size checks in calls to functions that neither read
3928 from nor write to the region.
3930 When nonnull, PAD points to a more detailed description of the access.
3932 If the call is successfully verified as safe return true, otherwise
3933 return false. */
3935 bool
3936 check_access (tree exp, tree dstwrite,
3937 tree maxread, tree srcstr, tree dstsize,
3938 access_mode mode, const access_data *pad /* = NULL */)
3940 /* The size of the largest object is half the address space, or
3941 PTRDIFF_MAX. (This is way too permissive.) */
3942 tree maxobjsize = max_object_size ();
3944 /* Either the approximate/minimum length of the source string for
3945 string functions or the size of the source object for raw memory
3946 functions. */
3947 tree slen = NULL_TREE;
3949 /* The range of the access in bytes; first set to the write access
3950 for functions that write and then read for those that also (or
3951 just) read. */
3952 tree range[2] = { NULL_TREE, NULL_TREE };
3954 /* Set to true when the exact number of bytes written by a string
3955 function like strcpy is not known and the only thing that is
3956 known is that it must be at least one (for the terminating nul). */
3957 bool at_least_one = false;
3958 if (srcstr)
3960 /* SRCSTR is normally a pointer to string but as a special case
3961 it can be an integer denoting the length of a string. */
3962 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3964 if (!check_nul_terminated_array (exp, srcstr, maxread))
3965 return false;
3966 /* Try to determine the range of lengths the source string
3967 refers to. If it can be determined and is less than
3968 the upper bound given by MAXREAD add one to it for
3969 the terminating nul. Otherwise, set it to one for
3970 the same reason, or to MAXREAD as appropriate. */
3971 c_strlen_data lendata = { };
3972 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3973 range[0] = lendata.minlen;
3974 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3975 if (range[0]
3976 && TREE_CODE (range[0]) == INTEGER_CST
3977 && TREE_CODE (range[1]) == INTEGER_CST
3978 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3980 if (maxread && tree_int_cst_le (maxread, range[0]))
3981 range[0] = range[1] = maxread;
3982 else
3983 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3984 range[0], size_one_node);
3986 if (maxread && tree_int_cst_le (maxread, range[1]))
3987 range[1] = maxread;
3988 else if (!integer_all_onesp (range[1]))
3989 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3990 range[1], size_one_node);
3992 slen = range[0];
3994 else
3996 at_least_one = true;
3997 slen = size_one_node;
4000 else
4001 slen = srcstr;
4004 if (!dstwrite && !maxread)
4006 /* When the only available piece of data is the object size
4007 there is nothing to do. */
4008 if (!slen)
4009 return true;
4011 /* Otherwise, when the length of the source sequence is known
4012 (as with strlen), set DSTWRITE to it. */
4013 if (!range[0])
4014 dstwrite = slen;
4017 if (!dstsize)
4018 dstsize = maxobjsize;
4020 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4021 if valid. */
4022 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4024 tree func = get_callee_fndecl (exp);
4025 /* Read vs write access by built-ins can be determined from the const
4026 qualifiers on the pointer argument. In the absence of attribute
4027 access, non-const qualified pointer arguments to user-defined
4028 functions are assumed to both read and write the objects. */
4029 const bool builtin = func ? fndecl_built_in_p (func) : false;
4031 /* First check the number of bytes to be written against the maximum
4032 object size. */
4033 if (range[0]
4034 && TREE_CODE (range[0]) == INTEGER_CST
4035 && tree_int_cst_lt (maxobjsize, range[0]))
4037 location_t loc = tree_nonartificial_location (exp);
4038 loc = expansion_point_location_if_in_system_header (loc);
4040 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4041 NULL_TREE, pad);
4042 return false;
4045 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4046 constant, and in range of unsigned HOST_WIDE_INT. */
4047 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4049 /* Next check the number of bytes to be written against the destination
4050 object size. */
4051 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4053 if (range[0]
4054 && TREE_CODE (range[0]) == INTEGER_CST
4055 && ((tree_fits_uhwi_p (dstsize)
4056 && tree_int_cst_lt (dstsize, range[0]))
4057 || (dstwrite
4058 && tree_fits_uhwi_p (dstwrite)
4059 && tree_int_cst_lt (dstwrite, range[0]))))
4061 if (TREE_NO_WARNING (exp)
4062 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4063 return false;
4065 location_t loc = tree_nonartificial_location (exp);
4066 loc = expansion_point_location_if_in_system_header (loc);
4068 bool warned = false;
4069 if (dstwrite == slen && at_least_one)
4071 /* This is a call to strcpy with a destination of 0 size
4072 and a source of unknown length. The call will write
4073 at least one byte past the end of the destination. */
4074 warned = (func
4075 ? warning_at (loc, OPT_Wstringop_overflow_,
4076 "%K%qD writing %E or more bytes into "
4077 "a region of size %E overflows "
4078 "the destination",
4079 exp, func, range[0], dstsize)
4080 : warning_at (loc, OPT_Wstringop_overflow_,
4081 "%Kwriting %E or more bytes into "
4082 "a region of size %E overflows "
4083 "the destination",
4084 exp, range[0], dstsize));
4086 else
4088 const bool read
4089 = mode == access_read_only || mode == access_read_write;
4090 const bool write
4091 = mode == access_write_only || mode == access_read_write;
4092 warned = warn_for_access (loc, func, exp,
4093 OPT_Wstringop_overflow_,
4094 range, dstsize,
4095 write, read && !builtin);
4098 if (warned)
4100 TREE_NO_WARNING (exp) = true;
4101 if (pad)
4102 inform_access (pad->dst, pad->mode);
4105 /* Return failure when an overflow has been detected. */
4106 return false;
4110 /* Check the maximum length of the source sequence against the size
4111 of the destination object if known, or against the maximum size
4112 of an object. */
4113 if (maxread)
4115 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4116 PAD is nonnull and BNDRNG is valid. */
4117 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4119 location_t loc = tree_nonartificial_location (exp);
4120 loc = expansion_point_location_if_in_system_header (loc);
4122 tree size = dstsize;
4123 if (pad && pad->mode == access_read_only)
4124 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4126 if (range[0] && maxread && tree_fits_uhwi_p (size))
4128 if (tree_int_cst_lt (maxobjsize, range[0]))
4130 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4131 range, size, pad);
4132 return false;
4135 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4137 int opt = (dstwrite || mode != access_read_only
4138 ? OPT_Wstringop_overflow_
4139 : OPT_Wstringop_overread);
4140 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4141 return false;
4145 maybe_warn_nonstring_arg (func, exp);
4148 /* Check for reading past the end of SRC. */
4149 bool overread = (slen
4150 && slen == srcstr
4151 && dstwrite
4152 && range[0]
4153 && TREE_CODE (slen) == INTEGER_CST
4154 && tree_int_cst_lt (slen, range[0]));
4156 if (!overread && pad && pad->src.sizrng[1] >= 0 && pad->src.offrng[0] >= 0)
4158 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4159 PAD is nonnull and BNDRNG is valid. */
4160 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4161 /* Set OVERREAD for reads starting just past the end of an object. */
4162 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4163 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4164 slen = size_zero_node;
4167 if (overread)
4169 if (TREE_NO_WARNING (exp)
4170 || (srcstr && TREE_NO_WARNING (srcstr))
4171 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4172 return false;
4174 location_t loc = tree_nonartificial_location (exp);
4175 loc = expansion_point_location_if_in_system_header (loc);
4177 const bool read
4178 = mode == access_read_only || mode == access_read_write;
4179 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4180 slen, false, read))
4182 TREE_NO_WARNING (exp) = true;
4183 if (pad)
4184 inform_access (pad->src, access_read_only);
4186 return false;
4189 return true;
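/* An editorial sketch of the trailing overread check above
   (hypothetical user code):

     char s[4], d[4];
     memcpy (d, s + 4, 4);

   PAD->SRC.SIZRNG[1] is 4, PAD->SRC.OFFRNG[0] is 4 and
   PAD->SRC.BNDRNG[0] is 4, so 4 - 4 < 4 marks the access as a read
   starting just past the end of S.  */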
4192 /* A convenience wrapper for check_access above to check access
4193 by a read-only function like puts. */
4195 static bool
4196 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4197 int ost /* = 1 */)
4199 if (!warn_stringop_overread)
4200 return true;
4202 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4203 compute_objsize (src, ost, &data.src);
4204 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4205 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4206 &data);
4209 /* If STMT is a call to an allocation function, returns the constant
4210 maximum size of the object allocated by the call represented as
4211 sizetype. If nonnull, sets RNG1[] to the range of the size.
4212 When nonnull, uses RVALS for range information, otherwise calls
4213 get_range_info to get it.
4214 Returns null when STMT is not a call to a valid allocation function. */
4216 tree
4217 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4218 range_query *rvals /* = NULL */)
4220 if (!stmt)
4221 return NULL_TREE;
4223 tree allocfntype;
4224 if (tree fndecl = gimple_call_fndecl (stmt))
4225 allocfntype = TREE_TYPE (fndecl);
4226 else
4227 allocfntype = gimple_call_fntype (stmt);
4229 if (!allocfntype)
4230 return NULL_TREE;
4232 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4233 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4234 if (!at)
4236 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4237 return NULL_TREE;
4239 argidx1 = 0;
4242 unsigned nargs = gimple_call_num_args (stmt);
4244 if (argidx1 == UINT_MAX)
4246 tree atval = TREE_VALUE (at);
4247 if (!atval)
4248 return NULL_TREE;
4250 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4251 if (nargs <= argidx1)
4252 return NULL_TREE;
4254 atval = TREE_CHAIN (atval);
4255 if (atval)
4257 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4258 if (nargs <= argidx2)
4259 return NULL_TREE;
4263 tree size = gimple_call_arg (stmt, argidx1);
4265 wide_int rng1_buf[2];
4266 /* If RNG1 is not set, use the buffer. */
4267 if (!rng1)
4268 rng1 = rng1_buf;
4270 const int prec = ADDR_MAX_PRECISION;
4271 const tree size_max = TYPE_MAX_VALUE (sizetype);
4272 if (!get_range (size, stmt, rng1, rvals))
4274 /* Use the full non-negative range on failure. */
4275 rng1[0] = wi::zero (prec);
4276 rng1[1] = wi::to_wide (size_max, prec);
4279 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4280 return fold_convert (sizetype, size);
4282 /* To handle ranges do the math in wide_int and return the product
4283 of the upper bounds as a constant. Ignore anti-ranges. */
4284 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4285 wide_int rng2[2];
4286 if (!get_range (n, stmt, rng2, rvals))
4288 /* As above, use the full non-negative range on failure. */
4289 rng2[0] = wi::zero (prec);
4290 rng2[1] = wi::to_wide (size_max, prec);
4293 /* Extend to the maximum precision to avoid overflow. */
4294 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
4295 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
4296 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
4297 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
4299 /* Compute products of both bounds for the caller but return the lesser
4300 of SIZE_MAX and the product of the upper bounds as a constant. */
4301 rng1[0] = rng1[0] * rng2[0];
4302 rng1[1] = rng1[1] * rng2[1];
4303 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
4305 rng1[1] = wi::to_wide (size_max);
4306 return size_max;
4309 return wide_int_to_tree (sizetype, rng1[1]);
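/* An editorial sketch of the two-argument alloc_size handling above
   (my_calloc is a hypothetical user declaration):

     void *my_calloc (size_t, size_t)
       __attribute__ ((alloc_size (1, 2)));

     void *p = my_calloc (n, 8);   // n known to be in [1, 4]

   RNG1 is set to the product of the bounds, [8, 32], and the function
   returns the upper bound 32 (capped at SIZE_MAX) as a sizetype
   constant.  */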
4312 /* For an access to an object referenced by the function parameter PTR
4313 of pointer type, set RNG[] to the range of sizes of the object
4314 obtained from the attribute access specification for the current
4315 function. Return the function parameter on success and null otherwise. */
4317 tree
4318 gimple_parm_array_size (tree ptr, wide_int rng[2], range_query * /* = NULL */)
4320 /* For a function argument try to determine the byte size of the array
4321 from the current function declaration (e.g., attribute access or
4322 related). */
4323 tree var = SSA_NAME_VAR (ptr);
4324 if (TREE_CODE (var) != PARM_DECL)
4325 return NULL_TREE;
4327 const unsigned prec = TYPE_PRECISION (sizetype);
4329 rdwr_map rdwr_idx;
4330 attr_access *access = get_parm_access (rdwr_idx, var);
4331 if (!access)
4332 return NULL_TREE;
4334 if (access->sizarg != UINT_MAX)
4336 /* TODO: Try to extract the range from the argument based on
4337 those of subsequent assertions or based on known calls to
4338 the current function. */
4339 return NULL_TREE;
4342 if (!access->minsize)
4343 return NULL_TREE;
4345 /* Only consider ordinary array bound at level 2 (or above if it's
4346 ever added). */
4347 if (warn_array_parameter < 2 && !access->static_p)
4348 return NULL_TREE;
4350 rng[0] = wi::zero (prec);
4351 rng[1] = wi::uhwi (access->minsize, prec);
4352 /* If the PTR argument points to an array, multiply MINSIZE by the size
4353 of the array element type. Otherwise, multiply it by the size of what
4354 the pointer points to. */
4355 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
4356 if (TREE_CODE (eltype) == ARRAY_TYPE)
4357 eltype = TREE_TYPE (eltype);
4358 tree size = TYPE_SIZE_UNIT (eltype);
4359 if (!size || TREE_CODE (size) != INTEGER_CST)
4360 return NULL_TREE;
4362 rng[1] *= wi::to_wide (size, prec);
4363 return var;
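/* An editorial sketch of a parameter handled above (hypothetical
   user code; the static bound makes it eligible even below
   -Warray-parameter=2):

     void f (int a[static 8]);

   ACCESS->MINSIZE is 8 and the element type is int, so RNG is set
   to [0, 8 * sizeof (int)] and the PARM_DECL for A is returned.  */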
4366 /* Wrapper around the wide_int overload of get_range. Returns the same
4367 result but accepts offset_int instead. */
4369 static bool
4370 get_range (tree x, gimple *stmt, signop sgn, offset_int r[2],
4371 range_query *rvals /* = NULL */)
4373 wide_int wr[2];
4374 if (!get_range (x, stmt, wr, rvals))
4375 return false;
4377 r[0] = offset_int::from (wr[0], sgn);
4378 r[1] = offset_int::from (wr[1], sgn);
4379 return true;
4382 /* Helper to compute the size of the object referenced by the PTR
4383 expression which must have pointer type, using Object Size type
4384 OSTYPE (only the least significant 2 bits are used).
4385 On success, sets PREF->REF to the DECL of the referenced object
4386 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4387 offsets into it, and PREF->SIZRNG to the range of sizes of
4388 the object(s).
4389 VISITED is used to avoid visiting the same PHI operand multiple
4390 times, and, when nonnull, RVALS to determine range information.
4391 Returns true on success, false when the size cannot be determined.
4393 The function is intended for diagnostics and should not be used
4394 to influence code generation or optimization. */
4396 static bool
4397 compute_objsize (tree ptr, int ostype, access_ref *pref,
4398 bitmap *visited, range_query *rvals /* = NULL */)
4400 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4401 if (addr)
4402 ptr = TREE_OPERAND (ptr, 0);
4404 if (DECL_P (ptr))
4406 /* Bail if the reference is to the pointer itself (as opposed
4407 to what it points to). */
4408 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4409 return false;
4411 tree size = decl_init_size (ptr, false);
4412 if (!size || TREE_CODE (size) != INTEGER_CST)
4413 return false;
4415 pref->ref = ptr;
4416 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4417 return true;
4420 const tree_code code = TREE_CODE (ptr);
4422 if (code == COMPONENT_REF)
4424 tree field = TREE_OPERAND (ptr, 1);
4426 if (ostype == 0)
4428 /* For raw memory functions like memcpy bail if the size
4429 of the enclosing object cannot be determined. */
4430 tree ref = TREE_OPERAND (ptr, 0);
4431 if (!compute_objsize (ref, ostype, pref, visited, rvals)
4432 || !pref->ref)
4433 return false;
4435 /* Otherwise, use the size of the enclosing object and add
4436 the offset of the member to the offset computed so far. */
4437 tree offset = byte_position (field);
4438 if (TREE_CODE (offset) != INTEGER_CST)
4439 return false;
4440 offset_int off = wi::to_offset (offset);
4441 pref->offrng[0] += off;
4442 pref->offrng[1] += off;
4443 return true;
4446 /* Bail if the reference is to the pointer itself (as opposed
4447 to what it points to). */
4448 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4449 return false;
4451 pref->ref = field;
4452 /* Only return constant sizes for now while callers depend
4453 on it. INT0LEN is true for interior zero-length arrays. */
4454 bool int0len = false;
4455 tree size = component_ref_size (ptr, &int0len);
4456 if (int0len)
4458 pref->sizrng[0] = pref->sizrng[1] = 0;
4459 return true;
4462 if (!size || TREE_CODE (size) != INTEGER_CST)
4463 return false;
4465 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4466 return true;
4469 if (code == ARRAY_REF || code == MEM_REF)
4471 tree ref = TREE_OPERAND (ptr, 0);
4472 tree reftype = TREE_TYPE (ref);
4473 if (code == ARRAY_REF
4474 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4475 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4476 of known bound. */
4477 return false;
4479 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4481 /* Give up for MEM_REFs of vector types; those may be synthesized
4482 from multiple assignments to consecutive data members. See PR
4483 93200.
4484 FIXME: Deal with this more generally, e.g., by marking up such
4485 MEM_REFs at the time they're created. */
4486 reftype = TREE_TYPE (reftype);
4487 if (TREE_CODE (reftype) == VECTOR_TYPE)
4488 return false;
4491 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4492 return false;
4494 offset_int orng[2];
4495 tree off = TREE_OPERAND (ptr, 1);
4496 if (!get_range (off, NULL, SIGNED, orng, rvals))
4497 /* Fail unless the size of the object is zero. */
4498 return pref->sizrng[0] == 0 && pref->sizrng[0] == pref->sizrng[1];
4500 if (TREE_CODE (ptr) == ARRAY_REF)
4502 /* Convert the array index range determined above to a byte
4503 offset. */
4504 tree lowbnd = array_ref_low_bound (ptr);
4505 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4507 /* Adjust the index by the low bound of the array domain
4508 (normally zero but 1 in Fortran). */
4509 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4510 orng[0] -= lb;
4511 orng[1] -= lb;
4514 tree eltype = TREE_TYPE (ptr);
4515 tree tpsize = TYPE_SIZE_UNIT (eltype);
4516 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4517 return false;
4519 offset_int sz = wi::to_offset (tpsize);
4520 orng[0] *= sz;
4521 orng[1] *= sz;
4523 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4525 /* Except for the permissive raw memory functions which
4526 use the size of the whole object determined above,
4527 use the size of the referenced array. */
4528 pref->sizrng[0] = pref->offrng[0] + orng[0] + sz;
4529 pref->sizrng[1] = pref->offrng[1] + orng[1] + sz;
4533 pref->offrng[0] += orng[0];
4534 pref->offrng[1] += orng[1];
4536 return true;
4539 if (TREE_CODE (ptr) == SSA_NAME)
4541 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4542 if (is_gimple_call (stmt))
4544 /* If STMT is a call to an allocation function get the size
4545 from its argument(s). If successful, also set PREF->REF to
4546 PTR for the caller to include in diagnostics. */
4547 wide_int wr[2];
4548 if (gimple_call_alloc_size (stmt, wr, rvals))
4550 pref->ref = ptr;
4551 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4552 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4553 return true;
4555 return false;
4558 if (gimple_nop_p (stmt))
4560 /* For a function argument try to determine the byte size
4561 of the array from the current function declaration
4562 (e.g., attribute access or related). */
4563 wide_int wr[2];
4564 tree ref = gimple_parm_array_size (ptr, wr, rvals);
4565 if (!ref)
4566 return false;
4567 pref->ref = ref;
4568 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4569 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4570 return true;
4573 /* TODO: Handle PHI. */
4575 if (!is_gimple_assign (stmt))
4576 return false;
4578 ptr = gimple_assign_rhs1 (stmt);
4580 tree_code code = gimple_assign_rhs_code (stmt);
4581 if (TREE_CODE (TREE_TYPE (ptr)) != POINTER_TYPE)
4582 /* Avoid conversions from non-pointers. */
4583 return false;
4585 if (code == POINTER_PLUS_EXPR)
4587 /* If the offset in the expression can be determined use
4588 it to adjust the overall offset. Otherwise, set the overall
4589 offset to the maximum. */
4590 offset_int orng[2];
4591 tree off = gimple_assign_rhs2 (stmt);
4592 if (!get_range (off, stmt, SIGNED, orng, rvals))
4594 orng[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node));
4595 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4598 pref->offrng[0] += orng[0];
4599 pref->offrng[1] += orng[1];
4601 else if (code != ADDR_EXPR)
4602 return false;
4604 return compute_objsize (ptr, ostype, pref, visited, rvals);
4607 tree type = TREE_TYPE (ptr);
4608 type = TYPE_MAIN_VARIANT (type);
4609 if (TREE_CODE (ptr) == ADDR_EXPR)
4610 ptr = TREE_OPERAND (ptr, 0);
4612 if (TREE_CODE (type) == ARRAY_TYPE
4613 && !array_at_struct_end_p (ptr))
4615 if (tree size = TYPE_SIZE_UNIT (type))
4616 return get_range (size, NULL, UNSIGNED, pref->sizrng, rvals);
4619 return false;
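/* An editorial sketch of the COMPONENT_REF/ARRAY_REF handling above
   (hypothetical user code):

     struct S { char a[4]; char b[8]; } s;
     ... &s.b[2] ...

   With OSTYPE 1, PREF->REF is the FIELD_DECL for B, PREF->SIZRNG is
   [8, 8] and PREF->OFFRNG is [2, 2].  With OSTYPE 0 the size of the
   whole enclosing object is used instead: SIZRNG [12, 12] with the
   member offset folded in, OFFRNG [6, 6].  */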
4622 /* A "public" wrapper around the above. Clients should use this overload
4623 instead. */
4625 tree
4626 compute_objsize (tree ptr, int ostype, access_ref *pref,
4627 range_query *rvals /* = NULL */)
4629 bitmap visited = NULL;
4631 bool success
4632 = compute_objsize (ptr, ostype, pref, &visited, rvals);
4634 if (visited)
4635 BITMAP_FREE (visited);
4637 if (!success)
4638 return NULL_TREE;
4640 if (pref->offrng[1] < pref->offrng[0])
4642 if (pref->offrng[1] < 0
4643 && pref->sizrng[1] <= pref->offrng[0])
4644 return size_zero_node;
4646 return wide_int_to_tree (sizetype, pref->sizrng[1]);
4649 if (pref->offrng[0] < 0)
4651 if (pref->offrng[1] < 0)
4652 return size_zero_node;
4654 pref->offrng[0] = 0;
4657 if (pref->sizrng[1] <= pref->offrng[0])
4658 return size_zero_node;
4660 return wide_int_to_tree (sizetype, pref->sizrng[1] - pref->offrng[0]);
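/* A worked editorial example of the final computation above: for
   PREF->SIZRNG [12, 12] and PREF->OFFRNG [4, 8] the wrapper returns
   12 - 4 = 8, the maximum number of bytes accessible at the least
   in-bounds offset.  */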
4663 /* Transitional wrapper around the above. The function should be removed
4664 once callers transition to one of the two above. */
4666 tree
4667 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
4668 tree *poff /* = NULL */, class range_query *rvals /* = NULL */)
4670 /* Set the initial offsets to zero and size to negative to indicate
4671 none has been computed yet. */
4672 access_ref ref;
4673 tree size = compute_objsize (ptr, ostype, &ref, rvals);
4674 if (!size)
4675 return NULL_TREE;
4677 if (pdecl)
4678 *pdecl = ref.ref;
4680 if (poff)
4681 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
4683 return size;
4686 /* Helper to determine and check the sizes of the source and the destination
4687 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
4688 call expression, DEST is the destination argument, SRC is the source
4689 argument or null, and LEN is the number of bytes. Use Object Size type-0
4690 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4691 (no overflow or invalid sizes), false otherwise. */
4693 static bool
4694 check_memop_access (tree exp, tree dest, tree src, tree size)
4696 /* For functions like memset and memcpy that operate on raw memory
4697 try to determine the size of the largest source and destination
4698 object using type-0 Object Size regardless of the object size
4699 type specified by the option. */
4700 access_data data (exp, access_read_write);
4701 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
4702 tree dstsize = compute_objsize (dest, 0, &data.dst);
4704 return check_access (exp, size, /*maxread=*/NULL_TREE,
4705 srcsize, dstsize, data.mode, &data);
4708 /* Validate memchr arguments without performing any expansion.
4709 Return NULL_RTX. */
4711 static rtx
4712 expand_builtin_memchr (tree exp, rtx)
4714 if (!validate_arglist (exp,
4715 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4716 return NULL_RTX;
4718 tree arg1 = CALL_EXPR_ARG (exp, 0);
4719 tree len = CALL_EXPR_ARG (exp, 2);
4721 check_read_access (exp, arg1, len, 0);
4723 return NULL_RTX;
4726 /* Expand a call EXP to the memcpy builtin.
4727 Return NULL_RTX if we failed; the caller should emit a normal call,
4728 otherwise try to get the result in TARGET, if convenient (and in
4729 mode MODE if that's convenient). */
4731 static rtx
4732 expand_builtin_memcpy (tree exp, rtx target)
4734 if (!validate_arglist (exp,
4735 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4736 return NULL_RTX;
4738 tree dest = CALL_EXPR_ARG (exp, 0);
4739 tree src = CALL_EXPR_ARG (exp, 1);
4740 tree len = CALL_EXPR_ARG (exp, 2);
4742 check_memop_access (exp, dest, src, len);
4744 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4745 /*retmode=*/ RETURN_BEGIN, false);
4748 /* Check a call EXP to the memmove built-in for validity.
4749 Return NULL_RTX on both success and failure. */
4751 static rtx
4752 expand_builtin_memmove (tree exp, rtx target)
4754 if (!validate_arglist (exp,
4755 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4756 return NULL_RTX;
4758 tree dest = CALL_EXPR_ARG (exp, 0);
4759 tree src = CALL_EXPR_ARG (exp, 1);
4760 tree len = CALL_EXPR_ARG (exp, 2);
4762 check_memop_access (exp, dest, src, len);
4764 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4765 /*retmode=*/ RETURN_BEGIN, true);
4768 /* Expand a call EXP to the mempcpy builtin.
4769 Return NULL_RTX if we failed; the caller should emit a normal call,
4770 otherwise try to get the result in TARGET, if convenient (and in
4771 mode MODE if that's convenient). */
4773 static rtx
4774 expand_builtin_mempcpy (tree exp, rtx target)
4776 if (!validate_arglist (exp,
4777 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4778 return NULL_RTX;
4780 tree dest = CALL_EXPR_ARG (exp, 0);
4781 tree src = CALL_EXPR_ARG (exp, 1);
4782 tree len = CALL_EXPR_ARG (exp, 2);
4784 /* Policy does not generally allow using compute_objsize (which
4785 is used internally by check_memop_access) to change code generation
4786 or drive optimization decisions.
4788 In this instance it is safe because the code we generate has
4789 the same semantics regardless of the return value of
4790 check_memop_access. Exactly the same amount of data is copied
4791 and the return value is exactly the same in both cases.
4793 Furthermore, check_memop_access always uses mode 0 for the call to
4794 compute_objsize, so the imprecise nature of compute_objsize is
4795 avoided. */
4797 /* Avoid expanding mempcpy into memcpy when the call is determined
4798 to overflow the buffer. This also prevents the same overflow
4799 from being diagnosed again when expanding memcpy. */
4800 if (!check_memop_access (exp, dest, src, len))
4801 return NULL_RTX;
4803 return expand_builtin_mempcpy_args (dest, src, len,
4804 target, exp, /*retmode=*/ RETURN_END);
4807 /* Helper function to do the actual work for expanding the memory copy
4808 family of functions (memcpy, mempcpy, stpcpy). Expansion should assign
4809 LEN bytes of memory from SRC to DEST and assign the result to TARGET
4810 if convenient. The return value is based on the RETMODE argument. */
4812 static rtx
4813 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4814 rtx target, tree exp, memop_ret retmode,
4815 bool might_overlap)
4817 unsigned int src_align = get_pointer_alignment (src);
4818 unsigned int dest_align = get_pointer_alignment (dest);
4819 rtx dest_mem, src_mem, dest_addr, len_rtx;
4820 HOST_WIDE_INT expected_size = -1;
4821 unsigned int expected_align = 0;
4822 unsigned HOST_WIDE_INT min_size;
4823 unsigned HOST_WIDE_INT max_size;
4824 unsigned HOST_WIDE_INT probable_max_size;
4826 bool is_move_done;
4828 /* If DEST is not a pointer type, call the normal function. */
4829 if (dest_align == 0)
4830 return NULL_RTX;
4832 /* If SRC is not a pointer type, don't do this
4833 operation in-line. */
4834 if (src_align == 0)
4835 return NULL_RTX;
4837 if (currently_expanding_gimple_stmt)
4838 stringop_block_profile (currently_expanding_gimple_stmt,
4839 &expected_align, &expected_size);
4841 if (expected_align < dest_align)
4842 expected_align = dest_align;
4843 dest_mem = get_memory_rtx (dest, len);
4844 set_mem_align (dest_mem, dest_align);
4845 len_rtx = expand_normal (len);
4846 determine_block_size (len, len_rtx, &min_size, &max_size,
4847 &probable_max_size);
4849 /* Try to get the byte representation of the constant SRC points to,
4850 with its byte size in NBYTES. */
4851 unsigned HOST_WIDE_INT nbytes;
4852 const char *rep = getbyterep (src, &nbytes);
4854 /* If the function's constant bound LEN_RTX is less than or equal
4855 to the byte size of the representation of the constant argument,
4856 and if block move would be done by pieces, we can avoid loading
4857 the bytes from memory and only store the computed constant.
4858 This works in the overlap (memmove) case as well because
4859 store_by_pieces just generates a series of stores of constants
4860 from the representation returned by getbyterep(). */
4861 if (rep
4862 && CONST_INT_P (len_rtx)
4863 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
4864 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4865 CONST_CAST (char *, rep),
4866 dest_align, false))
4868 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4869 builtin_memcpy_read_str,
4870 CONST_CAST (char *, rep),
4871 dest_align, false, retmode);
4872 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4873 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4874 return dest_mem;
4877 src_mem = get_memory_rtx (src, len);
4878 set_mem_align (src_mem, src_align);
4880 /* Copy word part most expediently. */
4881 enum block_op_methods method = BLOCK_OP_NORMAL;
4882 if (CALL_EXPR_TAILCALL (exp)
4883 && (retmode == RETURN_BEGIN || target == const0_rtx))
4884 method = BLOCK_OP_TAILCALL;
4885 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4886 && retmode == RETURN_END
4887 && !might_overlap
4888 && target != const0_rtx);
4889 if (use_mempcpy_call)
4890 method = BLOCK_OP_NO_LIBCALL_RET;
4891 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4892 expected_align, expected_size,
4893 min_size, max_size, probable_max_size,
4894 use_mempcpy_call, &is_move_done,
4895 might_overlap);
4897 /* Bail out when a mempcpy call would be expanded as a libcall and
4898 when we have a target that provides a fast implementation
4899 of the mempcpy routine. */
4900 if (!is_move_done)
4901 return NULL_RTX;
4903 if (dest_addr == pc_rtx)
4904 return NULL_RTX;
4906 if (dest_addr == 0)
4908 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4909 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4912 if (retmode != RETURN_BEGIN && target != const0_rtx)
4914 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4915 /* stpcpy returns a pointer to the last byte. */
4916 if (retmode == RETURN_END_MINUS_ONE)
4917 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4920 return dest_addr;
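/* An editorial sketch of the constant-source shortcut above
   (hypothetical user code):

     char d[8];
     memcpy (d, "abc", 4);

   getbyterep () yields the four bytes "abc\0", and when
   can_store_by_pieces () approves, the expansion stores those bytes
   directly into D instead of loading them from the string constant.  */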
4923 static rtx
4924 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4925 rtx target, tree orig_exp, memop_ret retmode)
4927 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4928 retmode, false);
4931 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4932 we failed; the caller should emit a normal call, otherwise try to
4933 get the result in TARGET, if convenient.
4934 Return value is based on RETMODE argument. */
4936 static rtx
4937 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4939 class expand_operand ops[3];
4940 rtx dest_mem;
4941 rtx src_mem;
4943 if (!targetm.have_movstr ())
4944 return NULL_RTX;
4946 dest_mem = get_memory_rtx (dest, NULL);
4947 src_mem = get_memory_rtx (src, NULL);
4948 if (retmode == RETURN_BEGIN)
4950 target = force_reg (Pmode, XEXP (dest_mem, 0));
4951 dest_mem = replace_equiv_address (dest_mem, target);
4954 create_output_operand (&ops[0],
4955 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4956 create_fixed_operand (&ops[1], dest_mem);
4957 create_fixed_operand (&ops[2], src_mem);
4958 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4959 return NULL_RTX;
4961 if (retmode != RETURN_BEGIN && target != const0_rtx)
4963 target = ops[0].value;
4964 /* movstr is supposed to set end to the address of the NUL
4965 terminator. If the caller requested a mempcpy-like return value,
4966 adjust it. */
4967 if (retmode == RETURN_END)
4969 rtx tem = plus_constant (GET_MODE (target),
4970 gen_lowpart (GET_MODE (target), target), 1);
4971 emit_move_insn (target, force_operand (tem, NULL_RTX));
4974 return target;
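/* An editorial sketch of the RETURN_END adjustment above
   (hypothetical user code):

     char d[8];
     char *q = stpcpy (d, "abc");   // movstr leaves q == d + 3, the nul

   That matches RETURN_END_MINUS_ONE directly; a mempcpy-style
   RETURN_END caller expects d + 4, one past the nul, hence the
   plus_constant (..., 1).  */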
4977 /* Do some very basic size validation of a call to the strcat builtin
4978 given by EXP. Return NULL_RTX to have the built-in expand to a call
4979 to the library function. */
4981 static rtx
4982 expand_builtin_strcat (tree exp)
4984 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4985 || !warn_stringop_overflow)
4986 return NULL_RTX;
4988 tree dest = CALL_EXPR_ARG (exp, 0);
4989 tree src = CALL_EXPR_ARG (exp, 1);
4991 /* There is no way here to determine the length of the string in
4992 the destination to which the SRC string is being appended so
4993 just diagnose cases when the source string is longer than
4994 the destination object. */
4995 access_data data (exp, access_read_write, NULL_TREE, true,
4996 NULL_TREE, true);
4997 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
4998 compute_objsize (src, ost, &data.src);
4999 tree destsize = compute_objsize (dest, ost, &data.dst);
5001 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5002 src, destsize, data.mode, &data);
5004 return NULL_RTX;
5007 /* Expand expression EXP, which is a call to the strcpy builtin. Return
5008 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5009 try to get the result in TARGET, if convenient (and in mode MODE if that's
5010 convenient). */
5012 static rtx
5013 expand_builtin_strcpy (tree exp, rtx target)
5015 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5016 return NULL_RTX;
5018 tree dest = CALL_EXPR_ARG (exp, 0);
5019 tree src = CALL_EXPR_ARG (exp, 1);
5021 if (warn_stringop_overflow)
5023 access_data data (exp, access_read_write, NULL_TREE, true,
5024 NULL_TREE, true);
5025 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5026 compute_objsize (src, ost, &data.src);
5027 tree dstsize = compute_objsize (dest, ost, &data.dst);
5028 check_access (exp, /*dstwrite=*/ NULL_TREE,
5029 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
5030 dstsize, data.mode, &data);
5033 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
5035 /* Check to see if the argument was declared attribute nonstring
5036 and if so, issue a warning since at this point it's not known
5037 to be nul-terminated. */
5038 tree fndecl = get_callee_fndecl (exp);
5039 maybe_warn_nonstring_arg (fndecl, exp);
5040 return ret;
5043 return NULL_RTX;
5046 /* Helper function to do the actual work for expand_builtin_strcpy. The
5047 arguments to the builtin_strcpy call DEST and SRC are broken out
5048 so that this can also be called without constructing an actual CALL_EXPR.
5049 The other arguments and return value are the same as for
5050 expand_builtin_strcpy. */
5052 static rtx
5053 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5055 /* Detect strcpy calls with unterminated arrays. */
5056 tree size;
5057 bool exact;
5058 if (tree nonstr = unterminated_array (src, &size, &exact))
5060 /* NONSTR refers to the non-nul terminated constant array. */
5061 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
5062 size, exact);
5063 return NULL_RTX;
5066 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
5069 /* Expand a call EXP to the stpcpy builtin.
5070 Return NULL_RTX if we failed; the caller should emit a normal call,
5071 otherwise try to get the result in TARGET, if convenient (and in
5072 mode MODE if that's convenient). */
5074 static rtx
5075 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
5077 tree dst, src;
5078 location_t loc = EXPR_LOCATION (exp);
5080 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5081 return NULL_RTX;
5083 dst = CALL_EXPR_ARG (exp, 0);
5084 src = CALL_EXPR_ARG (exp, 1);
5086 if (warn_stringop_overflow)
5088 access_data data (exp, access_read_write);
5089 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
5090 &data.dst);
5091 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5092 src, destsize, data.mode, &data);
5095 /* If return value is ignored, transform stpcpy into strcpy. */
5096 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
5098 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
5099 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
5100 return expand_expr (result, target, mode, EXPAND_NORMAL);
5102 else
5104 tree len, lenp1;
5105 rtx ret;
5107 /* Ensure we get an actual string whose length can be evaluated at
5108 compile-time, not an expression containing a string. This is
5109 because the latter will potentially produce pessimized code
5110 when used to produce the return value. */
5111 c_strlen_data lendata = { };
5112 if (!c_getstr (src)
5113 || !(len = c_strlen (src, 0, &lendata, 1)))
5114 return expand_movstr (dst, src, target,
5115 /*retmode=*/ RETURN_END_MINUS_ONE);
5117 if (lendata.decl)
5118 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
5120 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
5121 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
5122 target, exp,
5123 /*retmode=*/ RETURN_END_MINUS_ONE);
5125 if (ret)
5126 return ret;
5128 if (TREE_CODE (len) == INTEGER_CST)
5130 rtx len_rtx = expand_normal (len);
5132 if (CONST_INT_P (len_rtx))
5134 ret = expand_builtin_strcpy_args (exp, dst, src, target);
5136 if (ret)
5138 if (! target)
5140 if (mode != VOIDmode)
5141 target = gen_reg_rtx (mode);
5142 else
5143 target = gen_reg_rtx (GET_MODE (ret));
5145 if (GET_MODE (target) != GET_MODE (ret))
5146 ret = gen_lowpart (GET_MODE (target), ret);
5148 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
5149 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
5150 gcc_assert (ret);
5152 return target;
5157 return expand_movstr (dst, src, target,
5158 /*retmode=*/ RETURN_END_MINUS_ONE);
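/* An editorial sketch of the constant-length path above (hypothetical
   user code):

     char d[8];
     char *e = stpcpy (d, "abc");

   c_strlen () gives LEN 3, so the call expands as mempcpy with
   LENP1 == 4 in RETURN_END_MINUS_ONE mode and E == D + 3, a pointer
   to the terminating nul.  */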
5162 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
5163 arguments while being careful to avoid duplicate warnings (which could
5164 be issued if the expander were to expand the call, resulting in it
5165 being emitted in expand_call()). */
5167 static rtx
5168 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
5170 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
5172 /* The call has been successfully expanded. Check for nonstring
5173 arguments and issue warnings as appropriate. */
5174 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
5175 return ret;
5178 return NULL_RTX;
5181 /* Check a call EXP to the stpncpy built-in for validity.
5182 Return NULL_RTX on both success and failure. */
5184 static rtx
5185 expand_builtin_stpncpy (tree exp, rtx)
5187 if (!validate_arglist (exp,
5188 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5189 || !warn_stringop_overflow)
5190 return NULL_RTX;
5192 /* The source and destination of the call. */
5193 tree dest = CALL_EXPR_ARG (exp, 0);
5194 tree src = CALL_EXPR_ARG (exp, 1);
5196 /* The exact number of bytes to write (not the maximum). */
5197 tree len = CALL_EXPR_ARG (exp, 2);
5198 access_data data (exp, access_read_write);
5199 /* The size of the destination object. */
5200 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5201 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
5202 return NULL_RTX;
5205 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5206 bytes from constant string DATA + OFFSET and return it as target
5207 constant. */
5209 static rtx
5210 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
5211 scalar_int_mode mode)
5213 const char *str = (const char *) data;
5215 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
5216 return const0_rtx;
5218 return c_readstr (str + offset, mode);
5221 /* Helper to check the sizes of sequences and the destination of calls
5222 to __builtin_strncat and __builtin___strncat_chk. Returns true on
5223 success (no overflow or invalid sizes), false otherwise. */
5225 static bool
5226 check_strncat_sizes (tree exp, tree objsize)
5228 tree dest = CALL_EXPR_ARG (exp, 0);
5229 tree src = CALL_EXPR_ARG (exp, 1);
5230 tree maxread = CALL_EXPR_ARG (exp, 2);
5232 /* Try to determine the range of lengths that the source expression
5233 refers to. */
5234 c_strlen_data lendata = { };
5235 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5237 /* Try to verify that the destination is big enough for the shortest
5238 string. */
5240 access_data data (exp, access_read_write, maxread, true);
5241 if (!objsize && warn_stringop_overflow)
5243 /* If it hasn't been provided by __strncat_chk, try to determine
5244 the size of the destination object into which the source is
5245 being copied. */
5246 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5249 /* Add one for the terminating nul. */
5250 tree srclen = (lendata.minlen
5251 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
5252 size_one_node)
5253 : NULL_TREE);
5255 /* The strncat function copies at most MAXREAD bytes and always appends
5256 the terminating nul so the specified upper bound should never be equal
5257 to (or greater than) the size of the destination. */
5258 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
5259 && tree_int_cst_equal (objsize, maxread))
5261 location_t loc = tree_nonartificial_location (exp);
5262 loc = expansion_point_location_if_in_system_header (loc);
5264 warning_at (loc, OPT_Wstringop_overflow_,
5265 "%K%qD specified bound %E equals destination size",
5266 exp, get_callee_fndecl (exp), maxread);
5268 return false;
5271 if (!srclen
5272 || (maxread && tree_fits_uhwi_p (maxread)
5273 && tree_fits_uhwi_p (srclen)
5274 && tree_int_cst_lt (maxread, srclen)))
5275 srclen = maxread;
5277 /* The number of bytes to write is LEN but check_access will also
5278 check SRCLEN if LEN's value isn't known. */
5279 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5280 objsize, data.mode, &data);
5283 /* Similar to expand_builtin_strcat, do some very basic size validation
5284 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
5285 the built-in expand to a call to the library function. */
5287 static rtx
5288 expand_builtin_strncat (tree exp, rtx)
5290 if (!validate_arglist (exp,
5291 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5292 || !warn_stringop_overflow)
5293 return NULL_RTX;
5295 tree dest = CALL_EXPR_ARG (exp, 0);
5296 tree src = CALL_EXPR_ARG (exp, 1);
5297 /* The upper bound on the number of bytes to write. */
5298 tree maxread = CALL_EXPR_ARG (exp, 2);
5300 /* Detect unterminated source (only). */
5301 if (!check_nul_terminated_array (exp, src, maxread))
5302 return NULL_RTX;
5304 /* The length of the source sequence. */
5305 tree slen = c_strlen (src, 1);
5307 /* Try to determine the range of lengths that the source expression
5308 refers to. Since the lengths are only used for warning and not
5309 for code generation, disable strict mode below. */
5310 tree maxlen = slen;
5311 if (!maxlen)
5313 c_strlen_data lendata = { };
5314 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5315 maxlen = lendata.maxbound;
5318 access_data data (exp, access_read_write);
5319 /* Try to verify that the destination is big enough for the shortest
5320 string. First try to determine the size of the destination object
5321 into which the source is being copied. */
5322 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5324 /* Add one for the terminating nul. */
5325 tree srclen = (maxlen
5326 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
5327 size_one_node)
5328 : NULL_TREE);
5330 /* The strncat function copies at most MAXREAD bytes and always appends
5331 the terminating nul so the specified upper bound should never be equal
5332 to (or greater than) the size of the destination. */
5333 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
5334 && tree_int_cst_equal (destsize, maxread))
5336 location_t loc = tree_nonartificial_location (exp);
5337 loc = expansion_point_location_if_in_system_header (loc);
5339 warning_at (loc, OPT_Wstringop_overflow_,
5340 "%K%qD specified bound %E equals destination size",
5341 exp, get_callee_fndecl (exp), maxread);
5343 return NULL_RTX;
5346 if (!srclen
5347 || (maxread && tree_fits_uhwi_p (maxread)
5348 && tree_fits_uhwi_p (srclen)
5349 && tree_int_cst_lt (maxread, srclen)))
5350 srclen = maxread;
5352 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5353 destsize, data.mode, &data);
5354 return NULL_RTX;
5357 /* Expand expression EXP, which is a call to the strncpy builtin. Return
5358 NULL_RTX if we failed; the caller should emit a normal call. */
5360 static rtx
5361 expand_builtin_strncpy (tree exp, rtx target)
5363 location_t loc = EXPR_LOCATION (exp);
5365 if (!validate_arglist (exp,
5366 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5367 return NULL_RTX;
5368 tree dest = CALL_EXPR_ARG (exp, 0);
5369 tree src = CALL_EXPR_ARG (exp, 1);
5370 /* The number of bytes to write (not the maximum). */
5371 tree len = CALL_EXPR_ARG (exp, 2);
5373 /* The length of the source sequence. */
5374 tree slen = c_strlen (src, 1);
5376 if (warn_stringop_overflow)
5378 access_data data (exp, access_read_write, len, true, len, true);
5379 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5380 compute_objsize (src, ost, &data.src);
5381 tree dstsize = compute_objsize (dest, ost, &data.dst);
5382 /* The number of bytes to write is LEN but check_access will also
5383 check SLEN if LEN's value isn't known. */
5384 check_access (exp, /*dstwrite=*/len,
5385 /*maxread=*/len, src, dstsize, data.mode, &data);
5388 /* We must be passed a constant len and src parameter. */
5389 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5390 return NULL_RTX;
5392 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
5394 /* We're required to pad with trailing zeros if the requested
5395 len is greater than strlen(s2)+1. In that case try to
5396 use store_by_pieces; if it fails, punt. */
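/* For example, strncpy (d, "ab", 5) must store 'a', 'b' and three
   nul bytes, so when the constant block is cheap enough the whole
   five bytes are emitted by store_by_pieces.  */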
5397 if (tree_int_cst_lt (slen, len))
5399 unsigned int dest_align = get_pointer_alignment (dest);
5400 const char *p = c_getstr (src);
5401 rtx dest_mem;
5403 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5404 || !can_store_by_pieces (tree_to_uhwi (len),
5405 builtin_strncpy_read_str,
5406 CONST_CAST (char *, p),
5407 dest_align, false))
5408 return NULL_RTX;
5410 dest_mem = get_memory_rtx (dest, len);
5411 store_by_pieces (dest_mem, tree_to_uhwi (len),
5412 builtin_strncpy_read_str,
5413 CONST_CAST (char *, p), dest_align, false,
5414 RETURN_BEGIN);
5415 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5416 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5417 return dest_mem;
5420 return NULL_RTX;
5423 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
5424 bytes from constant string DATA + OFFSET and return it as target
5425 constant. */
5427 static rtx
5428 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5429 scalar_int_mode mode)
5431 const char *c = (const char *) data;
5432 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5434 memset (p, *c, GET_MODE_SIZE (mode));
5436 return c_readstr (p, mode);
5439 /* Callback routine for store_by_pieces. Return the RTL of a register
5440 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5441 char value given in the RTL register data. For example, if mode is
5442 4 bytes wide, return the RTL for 0x01010101*data. */
5444 static rtx
5445 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5446 scalar_int_mode mode)
5448 rtx target, coeff;
5449 size_t size;
5450 char *p;
5452 size = GET_MODE_SIZE (mode);
5453 if (size == 1)
5454 return (rtx) data;
5456 p = XALLOCAVEC (char, size);
5457 memset (p, 1, size);
5458 coeff = c_readstr (p, mode);
5460 target = convert_to_mode (mode, (rtx) data, 1);
5461 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5462 return force_reg (mode, target);
5465 /* Expand expression EXP, which is a call to the memset builtin. Return
5466 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5467 try to get the result in TARGET, if convenient (and in mode MODE if that's
5468 convenient). */
5470 static rtx
5471 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5473 if (!validate_arglist (exp,
5474 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5475 return NULL_RTX;
5477 tree dest = CALL_EXPR_ARG (exp, 0);
5478 tree val = CALL_EXPR_ARG (exp, 1);
5479 tree len = CALL_EXPR_ARG (exp, 2);
5481 check_memop_access (exp, dest, NULL_TREE, len);
5483 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5486 /* Helper function to do the actual work for expand_builtin_memset. The
5487 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5488 so that this can also be called without constructing an actual CALL_EXPR.
5489 The other arguments and return value are the same as for
5490 expand_builtin_memset. */
5492 static rtx
5493 expand_builtin_memset_args (tree dest, tree val, tree len,
5494 rtx target, machine_mode mode, tree orig_exp)
5496 tree fndecl, fn;
5497 enum built_in_function fcode;
5498 machine_mode val_mode;
5499 char c;
5500 unsigned int dest_align;
5501 rtx dest_mem, dest_addr, len_rtx;
5502 HOST_WIDE_INT expected_size = -1;
5503 unsigned int expected_align = 0;
5504 unsigned HOST_WIDE_INT min_size;
5505 unsigned HOST_WIDE_INT max_size;
5506 unsigned HOST_WIDE_INT probable_max_size;
5508 dest_align = get_pointer_alignment (dest);
5510 /* If DEST is not a pointer type, don't do this operation in-line. */
5511 if (dest_align == 0)
5512 return NULL_RTX;
5514 if (currently_expanding_gimple_stmt)
5515 stringop_block_profile (currently_expanding_gimple_stmt,
5516 &expected_align, &expected_size);
5518 if (expected_align < dest_align)
5519 expected_align = dest_align;
5521 /* If the LEN parameter is zero, return DEST. */
5522 if (integer_zerop (len))
5524 /* Evaluate and ignore VAL in case it has side-effects. */
5525 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5526 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5529 /* Stabilize the arguments in case we fail. */
5530 dest = builtin_save_expr (dest);
5531 val = builtin_save_expr (val);
5532 len = builtin_save_expr (len);
5534 len_rtx = expand_normal (len);
5535 determine_block_size (len, len_rtx, &min_size, &max_size,
5536 &probable_max_size);
5537 dest_mem = get_memory_rtx (dest, len);
5538 val_mode = TYPE_MODE (unsigned_char_type_node);
5540 if (TREE_CODE (val) != INTEGER_CST)
5542 rtx val_rtx;
5544 val_rtx = expand_normal (val);
5545 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5547 /* Assume that we can memset by pieces if we can store
5548 * the coefficients by pieces (in the required modes).
5549 * We can't pass builtin_memset_gen_str as that emits RTL. */
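/* I.e. the query is made with the constant byte 1 standing in for
   the real (non-constant) value; only when the stores themselves
   are deemed cheap is builtin_memset_gen_str used to emit them.  */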
5550 c = 1;
5551 if (tree_fits_uhwi_p (len)
5552 && can_store_by_pieces (tree_to_uhwi (len),
5553 builtin_memset_read_str, &c, dest_align,
5554 true))
5556 val_rtx = force_reg (val_mode, val_rtx);
5557 store_by_pieces (dest_mem, tree_to_uhwi (len),
5558 builtin_memset_gen_str, val_rtx, dest_align,
5559 true, RETURN_BEGIN);
5561 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5562 dest_align, expected_align,
5563 expected_size, min_size, max_size,
5564 probable_max_size))
5565 goto do_libcall;
5567 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5568 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5569 return dest_mem;
5572 if (target_char_cast (val, &c))
5573 goto do_libcall;
5575 if (c)
5577 if (tree_fits_uhwi_p (len)
5578 && can_store_by_pieces (tree_to_uhwi (len),
5579 builtin_memset_read_str, &c, dest_align,
5580 true))
5581 store_by_pieces (dest_mem, tree_to_uhwi (len),
5582 builtin_memset_read_str, &c, dest_align, true,
5583 RETURN_BEGIN);
5584 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5585 gen_int_mode (c, val_mode),
5586 dest_align, expected_align,
5587 expected_size, min_size, max_size,
5588 probable_max_size))
5589 goto do_libcall;
5591 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5592 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5593 return dest_mem;
5596 set_mem_align (dest_mem, dest_align);
5597 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5598 CALL_EXPR_TAILCALL (orig_exp)
5599 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5600 expected_align, expected_size,
5601 min_size, max_size,
5602 probable_max_size);
5604 if (dest_addr == 0)
5606 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5607 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5610 return dest_addr;
5612 do_libcall:
5613 fndecl = get_callee_fndecl (orig_exp);
5614 fcode = DECL_FUNCTION_CODE (fndecl);
5615 if (fcode == BUILT_IN_MEMSET)
5616 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5617 dest, val, len);
5618 else if (fcode == BUILT_IN_BZERO)
5619 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5620 dest, len);
5621 else
5622 gcc_unreachable ();
5623 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5624 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5625 return expand_call (fn, target, target == const0_rtx);
5628 /* Expand expression EXP, which is a call to the bzero builtin. Return
5629 NULL_RTX if we failed; the caller should emit a normal call. */
5631 static rtx
5632 expand_builtin_bzero (tree exp)
5634 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5635 return NULL_RTX;
5637 tree dest = CALL_EXPR_ARG (exp, 0);
5638 tree size = CALL_EXPR_ARG (exp, 1);
5640 check_memop_access (exp, dest, NULL_TREE, size);
5642 /* New argument list transforming bzero(ptr x, int y) to
5643 memset(ptr x, int 0, size_t y). This is done this way
5644 so that if it isn't expanded inline, we fall back to
5645 calling bzero instead of memset. */
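/* I.e. bzero (p, n) is expanded here as memset (p, 0, (size_t) n),
   while the do_libcall path above still emits a call to bzero.  */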
5647 location_t loc = EXPR_LOCATION (exp);
5649 return expand_builtin_memset_args (dest, integer_zero_node,
5650 fold_convert_loc (loc,
5651 size_type_node, size),
5652 const0_rtx, VOIDmode, exp);
5655 /* Try to expand cmpstr operation ICODE with the given operands.
5656 Return the result rtx on success, otherwise return null. */
5658 static rtx
5659 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5660 HOST_WIDE_INT align)
5662 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5664 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5665 target = NULL_RTX;
5667 class expand_operand ops[4];
5668 create_output_operand (&ops[0], target, insn_mode);
5669 create_fixed_operand (&ops[1], arg1_rtx);
5670 create_fixed_operand (&ops[2], arg2_rtx);
5671 create_integer_operand (&ops[3], align);
5672 if (maybe_expand_insn (icode, 4, ops))
5673 return ops[0].value;
5674 return NULL_RTX;
5677 /* Expand expression EXP, which is a call to the memcmp built-in function.
5678 Return NULL_RTX if we failed and the caller should emit a normal call,
5679 otherwise try to get the result in TARGET, if convenient.
5680 RESULT_EQ is true if we can relax the returned value to be either zero
5681 or nonzero, without caring about the sign. */
5683 static rtx
5684 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5686 if (!validate_arglist (exp,
5687 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5688 return NULL_RTX;
5690 tree arg1 = CALL_EXPR_ARG (exp, 0);
5691 tree arg2 = CALL_EXPR_ARG (exp, 1);
5692 tree len = CALL_EXPR_ARG (exp, 2);
5694 /* Diagnose calls where the specified length exceeds the size of either
5695 object. */
5696 if (!check_read_access (exp, arg1, len, 0)
5697 || !check_read_access (exp, arg2, len, 0))
5698 return NULL_RTX;
5700 /* Due to the performance benefit, always inline the calls first
5701 when result_eq is false. */
5702 rtx result = NULL_RTX;
5703 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5704 if (!result_eq && fcode != BUILT_IN_BCMP)
5706 result = inline_expand_builtin_bytecmp (exp, target);
5707 if (result)
5708 return result;
5711 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5712 location_t loc = EXPR_LOCATION (exp);
5714 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5715 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5717 /* If we don't have POINTER_TYPE, call the function. */
5718 if (arg1_align == 0 || arg2_align == 0)
5719 return NULL_RTX;
5721 rtx arg1_rtx = get_memory_rtx (arg1, len);
5722 rtx arg2_rtx = get_memory_rtx (arg2, len);
5723 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5725 /* Set MEM_SIZE as appropriate. */
5726 if (CONST_INT_P (len_rtx))
5728 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5729 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5732 by_pieces_constfn constfn = NULL;
5734 /* Try to get the byte representation of the constant ARG2 (or, only
5735 when the function's result is used for equality to zero, ARG1)
5736 points to, with its byte size in NBYTES. */
5737 unsigned HOST_WIDE_INT nbytes;
5738 const char *rep = getbyterep (arg2, &nbytes);
5739 if (result_eq && rep == NULL)
5741 /* For equality to zero the arguments are interchangeable. */
5742 rep = getbyterep (arg1, &nbytes);
5743 if (rep != NULL)
5744 std::swap (arg1_rtx, arg2_rtx);
5747 /* If the function's constant bound LEN_RTX is less than or equal
5748 to the byte size of the representation of the constant argument,
5749 and if block move would be done by pieces, we can avoid loading
5750 the bytes from memory and only store the computed constant result. */
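/* E.g. for memcmp (p, "abc", 4) the representation "abc" plus its
   nul gives NBYTES == 4, which covers the constant bound of 4, so
   the comparison can be synthesized without loading ARG2's bytes.  */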
5751 if (rep
5752 && CONST_INT_P (len_rtx)
5753 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
5754 constfn = builtin_memcpy_read_str;
5756 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5757 TREE_TYPE (len), target,
5758 result_eq, constfn,
5759 CONST_CAST (char *, rep));
5761 if (result)
5763 /* Return the value in the proper mode for this function. */
5764 if (GET_MODE (result) == mode)
5765 return result;
5767 if (target != 0)
5769 convert_move (target, result, 0);
5770 return target;
5773 return convert_to_mode (mode, result, 0);
5776 return NULL_RTX;
5779 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5780 if we failed the caller should emit a normal call, otherwise try to get
5781 the result in TARGET, if convenient. */
5783 static rtx
5784 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5786 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5787 return NULL_RTX;
5789 tree arg1 = CALL_EXPR_ARG (exp, 0);
5790 tree arg2 = CALL_EXPR_ARG (exp, 1);
5792 if (!check_read_access (exp, arg1)
5793 || !check_read_access (exp, arg2))
5794 return NULL_RTX;
5796 /* Due to the performance benefit, always inline the calls first. */
5797 rtx result = NULL_RTX;
5798 result = inline_expand_builtin_bytecmp (exp, target);
5799 if (result)
5800 return result;
5802 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5803 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5804 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5805 return NULL_RTX;
5807 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5808 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5810 /* If we don't have POINTER_TYPE, call the function. */
5811 if (arg1_align == 0 || arg2_align == 0)
5812 return NULL_RTX;
5814 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5815 arg1 = builtin_save_expr (arg1);
5816 arg2 = builtin_save_expr (arg2);
5818 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5819 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5821 /* Try to call cmpstrsi. */
5822 if (cmpstr_icode != CODE_FOR_nothing)
5823 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5824 MIN (arg1_align, arg2_align));
5826 /* Try to determine at least one length and call cmpstrnsi. */
5827 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5829 tree len;
5830 rtx arg3_rtx;
5832 tree len1 = c_strlen (arg1, 1);
5833 tree len2 = c_strlen (arg2, 1);
5835 if (len1)
5836 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5837 if (len2)
5838 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5840 /* If we don't have a constant length for the first, use the length
5841 of the second, if we know it. We don't require a constant for
5842 this case; some cost analysis could be done if both are available
5843 but neither is constant. For now, assume they're equally cheap,
5844 unless one has side effects. If both strings have constant lengths,
5845 use the smaller. */
5847 if (!len1)
5848 len = len2;
5849 else if (!len2)
5850 len = len1;
5851 else if (TREE_SIDE_EFFECTS (len1))
5852 len = len2;
5853 else if (TREE_SIDE_EFFECTS (len2))
5854 len = len1;
5855 else if (TREE_CODE (len1) != INTEGER_CST)
5856 len = len2;
5857 else if (TREE_CODE (len2) != INTEGER_CST)
5858 len = len1;
5859 else if (tree_int_cst_lt (len1, len2))
5860 len = len1;
5861 else
5862 len = len2;
5864 /* If both arguments have side effects, we cannot optimize. */
5865 if (len && !TREE_SIDE_EFFECTS (len))
5867 arg3_rtx = expand_normal (len);
5868 result = expand_cmpstrn_or_cmpmem
5869 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5870 arg3_rtx, MIN (arg1_align, arg2_align));
5874 tree fndecl = get_callee_fndecl (exp);
5875 if (result)
5877 /* Check to see if the argument was declared attribute nonstring
5878 and if so, issue a warning since at this point it's not known
5879 to be nul-terminated. */
5880 maybe_warn_nonstring_arg (fndecl, exp);
5882 /* Return the value in the proper mode for this function. */
5883 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5884 if (GET_MODE (result) == mode)
5885 return result;
5886 if (target == 0)
5887 return convert_to_mode (mode, result, 0);
5888 convert_move (target, result, 0);
5889 return target;
5892 /* Expand the library call ourselves using a stabilized argument
5893 list to avoid re-evaluating the function's arguments twice. */
5894 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5895 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5896 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5897 return expand_call (fn, target, target == const0_rtx);
5900 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5901 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5902 try to get the result in TARGET, if convenient. */
5904 static rtx
5905 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5906 ATTRIBUTE_UNUSED machine_mode mode)
5908 if (!validate_arglist (exp,
5909 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5910 return NULL_RTX;
5912 tree arg1 = CALL_EXPR_ARG (exp, 0);
5913 tree arg2 = CALL_EXPR_ARG (exp, 1);
5914 tree arg3 = CALL_EXPR_ARG (exp, 2);
5916 if (!check_nul_terminated_array (exp, arg1, arg3)
5917 || !check_nul_terminated_array (exp, arg2, arg3))
5918 return NULL_RTX;
5920 location_t loc = tree_nonartificial_location (exp);
5921 loc = expansion_point_location_if_in_system_header (loc);
5923 tree len1 = c_strlen (arg1, 1);
5924 tree len2 = c_strlen (arg2, 1);
5926 if (!len1 || !len2)
5928 /* Check to see if the argument was declared attribute nonstring
5929 and if so, issue a warning since at this point it's not known
5930 to be nul-terminated. */
5931 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
5932 && !len1 && !len2)
5934 /* A strncmp read is constrained not just by the bound but
5935 also by the length of the shorter string. Specifying
5936 a bound that's larger than the size of either array makes
5937 no sense and is likely a bug. When the length of neither
5938 of the two strings is known but the sizes of both of
5939 the arrays they are stored in are, issue a warning if
5940 the bound is larger than the size of the larger
5941 of the two arrays. */
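/* For example:

     char a[4], b[8];
     ... strncmp (a, b, 16) ...

   is diagnosed because the bound 16 exceeds even the larger of the
   two array sizes.  */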
5943 access_ref ref1 (arg3, true);
5944 access_ref ref2 (arg3, true);
5946 tree bndrng[2] = { NULL_TREE, NULL_TREE };
5947 get_size_range (arg3, bndrng, ref1.bndrng);
5949 tree size1 = compute_objsize (arg1, 1, &ref1);
5950 tree size2 = compute_objsize (arg2, 1, &ref2);
5951 tree func = get_callee_fndecl (exp);
5953 if (size1 && size2)
5955 tree maxsize = tree_int_cst_le (size1, size2) ? size2 : size1;
5957 if (tree_int_cst_lt (maxsize, bndrng[0]))
5958 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
5959 bndrng, maxsize);
5961 else if (bndrng[0]
5962 && !integer_zerop (bndrng[0])
5963 && ((size1 && integer_zerop (size1))
5964 || (size2 && integer_zerop (size2))))
5965 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
5966 bndrng, integer_zero_node);
5970 /* Due to the performance benefit, always inline the calls first. */
5971 rtx result = NULL_RTX;
5972 result = inline_expand_builtin_bytecmp (exp, target);
5973 if (result)
5974 return result;
5976 /* If c_strlen can determine an expression for one of the string
5977 lengths, and it doesn't have side effects, then emit cmpstrnsi
5978 using length MIN(strlen(string)+1, arg3). */
5979 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5980 if (cmpstrn_icode == CODE_FOR_nothing)
5981 return NULL_RTX;
5983 tree len;
5985 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5986 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5988 if (len1)
5989 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5990 if (len2)
5991 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5993 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5995 /* If we don't have a constant length for the first, use the length
5996 of the second, if we know it. If neither string is constant length,
5997 use the given length argument. We don't require a constant for
5998 this case; some cost analysis could be done if both are available
5999 but neither is constant. For now, assume they're equally cheap,
6000 unless one has side effects. If both strings have constant lengths,
6001 use the smaller. */
6003 if (!len1 && !len2)
6004 len = len3;
6005 else if (!len1)
6006 len = len2;
6007 else if (!len2)
6008 len = len1;
6009 else if (TREE_SIDE_EFFECTS (len1))
6010 len = len2;
6011 else if (TREE_SIDE_EFFECTS (len2))
6012 len = len1;
6013 else if (TREE_CODE (len1) != INTEGER_CST)
6014 len = len2;
6015 else if (TREE_CODE (len2) != INTEGER_CST)
6016 len = len1;
6017 else if (tree_int_cst_lt (len1, len2))
6018 len = len1;
6019 else
6020 len = len2;
6022 /* If we are not using the given length, we must incorporate it here.
6023 The actual new length parameter will be MIN(len,arg3) in this case. */
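/* E.g. for strncmp (s, "hi", n) with s of unknown length, LEN is
   the constant 3 (strlen ("hi") + 1), so the comparison below is
   emitted with length MIN (3, n).  */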
6024 if (len != len3)
6026 len = fold_convert_loc (loc, sizetype, len);
6027 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
6029 rtx arg1_rtx = get_memory_rtx (arg1, len);
6030 rtx arg2_rtx = get_memory_rtx (arg2, len);
6031 rtx arg3_rtx = expand_normal (len);
6032 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
6033 arg2_rtx, TREE_TYPE (len), arg3_rtx,
6034 MIN (arg1_align, arg2_align));
6036 tree fndecl = get_callee_fndecl (exp);
6037 if (result)
6039 /* Return the value in the proper mode for this function. */
6040 mode = TYPE_MODE (TREE_TYPE (exp));
6041 if (GET_MODE (result) == mode)
6042 return result;
6043 if (target == 0)
6044 return convert_to_mode (mode, result, 0);
6045 convert_move (target, result, 0);
6046 return target;
6049 /* Expand the library call ourselves using a stabilized argument
6050 list to avoid re-evaluating the function's arguments twice. */
6051 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
6052 if (TREE_NO_WARNING (exp))
6053 TREE_NO_WARNING (call) = true;
6054 gcc_assert (TREE_CODE (call) == CALL_EXPR);
6055 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
6056 return expand_call (call, target, target == const0_rtx);
6059 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
6060 if that's convenient. */
6062 rtx
6063 expand_builtin_saveregs (void)
6065 rtx val;
6066 rtx_insn *seq;
6068 /* Don't do __builtin_saveregs more than once in a function.
6069 Save the result of the first call and reuse it. */
6070 if (saveregs_value != 0)
6071 return saveregs_value;
6073 /* When this function is called, it means that registers must be
6074 saved on entry to this function. So we migrate the call to the
6075 first insn of this function. */
6077 start_sequence ();
6079 /* Do whatever the machine needs done in this case. */
6080 val = targetm.calls.expand_builtin_saveregs ();
6082 seq = get_insns ();
6083 end_sequence ();
6085 saveregs_value = val;
6087 /* Put the insns after the NOTE that starts the function. If this
6088 is inside a start_sequence, make the outer-level insn chain current, so
6089 the code is placed at the start of the function. */
6090 push_topmost_sequence ();
6091 emit_insn_after (seq, entry_of_function ());
6092 pop_topmost_sequence ();
6094 return val;
6097 /* Expand a call to __builtin_next_arg. */
6099 static rtx
6100 expand_builtin_next_arg (void)
6102 /* Checking arguments is already done in fold_builtin_next_arg
6103 that must be called before this function. */
6104 return expand_binop (ptr_mode, add_optab,
6105 crtl->args.internal_arg_pointer,
6106 crtl->args.arg_offset_rtx,
6107 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6110 /* Make it easier for the backends by protecting the valist argument
6111 from multiple evaluations. */
6113 static tree
6114 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
6116 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
6118 /* The current way of determining the type of valist is completely
6119 bogus. We should have the information on the va builtin instead. */
6120 if (!vatype)
6121 vatype = targetm.fn_abi_va_list (cfun->decl);
6123 if (TREE_CODE (vatype) == ARRAY_TYPE)
6125 if (TREE_SIDE_EFFECTS (valist))
6126 valist = save_expr (valist);
6128 /* For this case, the backends will be expecting a pointer to
6129 vatype, but it's possible we've actually been given an array
6130 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
6131 So fix it. */
6132 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
6134 tree p1 = build_pointer_type (TREE_TYPE (vatype));
6135 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
6138 else
6140 tree pt = build_pointer_type (vatype);
6142 if (! needs_lvalue)
6144 if (! TREE_SIDE_EFFECTS (valist))
6145 return valist;
6147 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
6148 TREE_SIDE_EFFECTS (valist) = 1;
6151 if (TREE_SIDE_EFFECTS (valist))
6152 valist = save_expr (valist);
6153 valist = fold_build2_loc (loc, MEM_REF,
6154 vatype, valist, build_int_cst (pt, 0));
6157 return valist;
6160 /* The "standard" definition of va_list is void*. */
6162 tree
6163 std_build_builtin_va_list (void)
6165 return ptr_type_node;
6168 /* The "standard" abi va_list is va_list_type_node. */
6170 tree
6171 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
6173 return va_list_type_node;
6176 /* The "standard" type of va_list is va_list_type_node. */
6178 tree
6179 std_canonical_va_list_type (tree type)
6181 tree wtype, htype;
6183 wtype = va_list_type_node;
6184 htype = type;
6186 if (TREE_CODE (wtype) == ARRAY_TYPE)
6188 /* If va_list is an array type, the argument may have decayed
6189 to a pointer type, e.g. by being passed to another function.
6190 In that case, unwrap both types so that we can compare the
6191 underlying records. */
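/* E.g. on x86-64, va_list is the array type __va_list_tag[1]; an
   argument passed on to another function decays to __va_list_tag *,
   and stripping one level from both sides lets the
   TYPE_MAIN_VARIANT comparison below still match.  */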
6192 if (TREE_CODE (htype) == ARRAY_TYPE
6193 || POINTER_TYPE_P (htype))
6195 wtype = TREE_TYPE (wtype);
6196 htype = TREE_TYPE (htype);
6199 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
6200 return va_list_type_node;
6202 return NULL_TREE;
6205 /* The "standard" implementation of va_start: just assign `nextarg' to
6206 the variable. */
6208 void
6209 std_expand_builtin_va_start (tree valist, rtx nextarg)
6211 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
6212 convert_move (va_r, nextarg, 0);
6215 /* Expand EXP, a call to __builtin_va_start. */
6217 static rtx
6218 expand_builtin_va_start (tree exp)
6220 rtx nextarg;
6221 tree valist;
6222 location_t loc = EXPR_LOCATION (exp);
6224 if (call_expr_nargs (exp) < 2)
6226 error_at (loc, "too few arguments to function %<va_start%>");
6227 return const0_rtx;
6230 if (fold_builtin_next_arg (exp, true))
6231 return const0_rtx;
6233 nextarg = expand_builtin_next_arg ();
6234 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
6236 if (targetm.expand_builtin_va_start)
6237 targetm.expand_builtin_va_start (valist, nextarg);
6238 else
6239 std_expand_builtin_va_start (valist, nextarg);
6241 return const0_rtx;
6244 /* Expand EXP, a call to __builtin_va_end. */
6246 static rtx
6247 expand_builtin_va_end (tree exp)
6249 tree valist = CALL_EXPR_ARG (exp, 0);
6251 /* Evaluate for side effects, if needed. I hate macros that don't
6252 do that. */
6253 if (TREE_SIDE_EFFECTS (valist))
6254 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
6256 return const0_rtx;
6259 /* Expand EXP, a call to __builtin_va_copy. We do this as a
6260 builtin rather than just as an assignment in stdarg.h because of the
6261 nastiness of array-type va_list types. */
6263 static rtx
6264 expand_builtin_va_copy (tree exp)
6266 tree dst, src, t;
6267 location_t loc = EXPR_LOCATION (exp);
6269 dst = CALL_EXPR_ARG (exp, 0);
6270 src = CALL_EXPR_ARG (exp, 1);
6272 dst = stabilize_va_list_loc (loc, dst, 1);
6273 src = stabilize_va_list_loc (loc, src, 0);
6275 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
6277 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
6279 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
6280 TREE_SIDE_EFFECTS (t) = 1;
6281 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6283 else
6285 rtx dstb, srcb, size;
6287 /* Evaluate to pointers. */
6288 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
6289 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
6290 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
6291 NULL_RTX, VOIDmode, EXPAND_NORMAL);
6293 dstb = convert_memory_address (Pmode, dstb);
6294 srcb = convert_memory_address (Pmode, srcb);
6296 /* "Dereference" to BLKmode memories. */
6297 dstb = gen_rtx_MEM (BLKmode, dstb);
6298 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
6299 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6300 srcb = gen_rtx_MEM (BLKmode, srcb);
6301 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
6302 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6304 /* Copy. */
6305 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
6308 return const0_rtx;
6311 /* Expand a call to one of the builtin functions __builtin_frame_address or
6312 __builtin_return_address. */
6314 static rtx
6315 expand_builtin_frame_address (tree fndecl, tree exp)
6317 /* The argument must be a nonnegative integer constant.
6318 It counts the number of frames to scan up the stack.
6319 The value is either the frame pointer value or the return
6320 address saved in that frame. */
6321 if (call_expr_nargs (exp) == 0)
6322 /* Warning about missing arg was already issued. */
6323 return const0_rtx;
6324 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
6326 error ("invalid argument to %qD", fndecl);
6327 return const0_rtx;
6329 else
6331 /* Number of frames to scan up the stack. */
6332 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
6334 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
6336 /* Some ports cannot access arbitrary stack frames. */
6337 if (tem == NULL)
6339 warning (0, "unsupported argument to %qD", fndecl);
6340 return const0_rtx;
6343 if (count)
6345 /* Warn since no effort is made to ensure that any frame
6346 beyond the current one exists or can be safely reached. */
6347 warning (OPT_Wframe_address, "calling %qD with "
6348 "a nonzero argument is unsafe", fndecl);
6351 /* For __builtin_frame_address, return what we've got. */
6352 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6353 return tem;
6355 if (!REG_P (tem)
6356 && ! CONSTANT_P (tem))
6357 tem = copy_addr_to_reg (tem);
6358 return tem;
6362 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
6363 failed and the caller should emit a normal call. */
6365 static rtx
6366 expand_builtin_alloca (tree exp)
6368 rtx op0;
6369 rtx result;
6370 unsigned int align;
6371 tree fndecl = get_callee_fndecl (exp);
6372 HOST_WIDE_INT max_size;
6373 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6374 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
6375 bool valid_arglist
6376 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6377 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
6378 VOID_TYPE)
6379 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
6380 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
6381 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
6383 if (!valid_arglist)
6384 return NULL_RTX;
6386 if ((alloca_for_var
6387 && warn_vla_limit >= HOST_WIDE_INT_MAX
6388 && warn_alloc_size_limit < warn_vla_limit)
6389 || (!alloca_for_var
6390 && warn_alloca_limit >= HOST_WIDE_INT_MAX
6391 && warn_alloc_size_limit < warn_alloca_limit
6394 /* -Walloca-larger-than and -Wvla-larger-than settings of
6395 less than HOST_WIDE_INT_MAX override the more general
6396 -Walloc-size-larger-than so unless either of the former
6397 options is smaller than the last one (which would imply
6398 that the call was already checked), check the alloca
6399 arguments for overflow. */
6400 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
6401 int idx[] = { 0, -1 };
6402 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
6405 /* Compute the argument. */
6406 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
6408 /* Compute the alignment. */
6409 align = (fcode == BUILT_IN_ALLOCA
6410 ? BIGGEST_ALIGNMENT
6411 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
6413 /* Compute the maximum size. */
6414 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6415 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
6416 : -1);
6418 /* Allocate the desired space. If the allocation stems from the declaration
6419 of a variable-sized object, it cannot accumulate. */
6420 result
6421 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
6422 result = convert_memory_address (ptr_mode, result);
6424 /* Dynamic allocations for variables are recorded during gimplification. */
6425 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
6426 record_dynamic_alloc (exp);
6428 return result;
6431 /* Emit the __asan_allocas_unpoison call in EXP. Add to its second
6432 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6433 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
6434 handle_builtin_stack_restore function. */
6436 static rtx
6437 expand_asan_emit_allocas_unpoison (tree exp)
6439 tree arg0 = CALL_EXPR_ARG (exp, 0);
6440 tree arg1 = CALL_EXPR_ARG (exp, 1);
6441 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6442 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6443 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6444 stack_pointer_rtx, NULL_RTX, 0,
6445 OPTAB_LIB_WIDEN);
6446 off = convert_modes (ptr_mode, Pmode, off, 0);
6447 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6448 OPTAB_LIB_WIDEN);
6449 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6450 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6451 top, ptr_mode, bot, ptr_mode);
6452 return ret;
6455 /* Expand a call to bswap builtin in EXP.
6456 Return NULL_RTX if a normal call should be emitted rather than expanding the
6457 function in-line. If convenient, the result should be placed in TARGET.
6458 SUBTARGET may be used as the target for computing one of EXP's operands. */
6460 static rtx
6461 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6462 rtx subtarget)
6464 tree arg;
6465 rtx op0;
6467 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6468 return NULL_RTX;
6470 arg = CALL_EXPR_ARG (exp, 0);
6471 op0 = expand_expr (arg,
6472 subtarget && GET_MODE (subtarget) == target_mode
6473 ? subtarget : NULL_RTX,
6474 target_mode, EXPAND_NORMAL);
6475 if (GET_MODE (op0) != target_mode)
6476 op0 = convert_to_mode (target_mode, op0, 1);
6478 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6480 gcc_assert (target);
6482 return convert_to_mode (target_mode, target, 1);
6485 /* Expand a call to a unary builtin in EXP.
6486 Return NULL_RTX if a normal call should be emitted rather than expanding the
6487 function in-line. If convenient, the result should be placed in TARGET.
6488 SUBTARGET may be used as the target for computing one of EXP's operands. */
6490 static rtx
6491 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6492 rtx subtarget, optab op_optab)
6494 rtx op0;
6496 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6497 return NULL_RTX;
6499 /* Compute the argument. */
6500 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6501 (subtarget
6502 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6503 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6504 VOIDmode, EXPAND_NORMAL);
6505 /* Compute op, into TARGET if possible.
6506 Set TARGET to wherever the result comes back. */
6507 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6508 op_optab, op0, target, op_optab != clrsb_optab);
6509 gcc_assert (target);
6511 return convert_to_mode (target_mode, target, 0);
6514 /* Expand a call to __builtin_expect. We just return our argument
6515 as the builtin_expect semantic should've been already executed by
6516 tree branch prediction pass. */
6518 static rtx
6519 expand_builtin_expect (tree exp, rtx target)
6521 tree arg;
6523 if (call_expr_nargs (exp) < 2)
6524 return const0_rtx;
6525 arg = CALL_EXPR_ARG (exp, 0);
6527 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6528 /* When guessing was done, the hints should be already stripped away. */
6529 gcc_assert (!flag_guess_branch_prob
6530 || optimize == 0 || seen_error ());
6531 return target;
6534 /* Expand a call to __builtin_expect_with_probability. We just return our
6535 argument as the builtin_expect semantic should've been already executed by
6536 tree branch prediction pass. */
6538 static rtx
6539 expand_builtin_expect_with_probability (tree exp, rtx target)
6541 tree arg;
6543 if (call_expr_nargs (exp) < 3)
6544 return const0_rtx;
6545 arg = CALL_EXPR_ARG (exp, 0);
6547 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6548 /* When guessing was done, the hints should be already stripped away. */
6549 gcc_assert (!flag_guess_branch_prob
6550 || optimize == 0 || seen_error ());
6551 return target;
6555 /* Expand a call to __builtin_assume_aligned. We just return our first
6556 argument as the builtin_assume_aligned semantic should've been already
6557 executed by CCP. */
6559 static rtx
6560 expand_builtin_assume_aligned (tree exp, rtx target)
6562 if (call_expr_nargs (exp) < 2)
6563 return const0_rtx;
6564 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6565 EXPAND_NORMAL);
6566 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6567 && (call_expr_nargs (exp) < 3
6568 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6569 return target;
6572 void
6573 expand_builtin_trap (void)
6575 if (targetm.have_trap ())
6577 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6578 /* For trap insns when not accumulating outgoing args force
6579 REG_ARGS_SIZE note to prevent crossjumping of calls with
6580 different args sizes. */
6581 if (!ACCUMULATE_OUTGOING_ARGS)
6582 add_args_size_note (insn, stack_pointer_delta);
6584 else
6586 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6587 tree call_expr = build_call_expr (fn, 0);
6588 expand_call (call_expr, NULL_RTX, false);
6591 emit_barrier ();
6594 /* Expand a call to __builtin_unreachable. We do nothing except emit
6595 a barrier saying that control flow will not pass here.
6597 It is the responsibility of the program being compiled to ensure
6598 that control flow never reaches __builtin_unreachable. */
6599 static void
6600 expand_builtin_unreachable (void)
6602 emit_barrier ();
6605 /* Expand EXP, a call to fabs, fabsf or fabsl.
6606 Return NULL_RTX if a normal call should be emitted rather than expanding
6607 the function inline. If convenient, the result should be placed
6608 in TARGET. SUBTARGET may be used as the target for computing
6609 the operand. */
6611 static rtx
6612 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6614 machine_mode mode;
6615 tree arg;
6616 rtx op0;
6618 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6619 return NULL_RTX;
6621 arg = CALL_EXPR_ARG (exp, 0);
6622 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6623 mode = TYPE_MODE (TREE_TYPE (arg));
6624 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6625 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6628 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6629 Return NULL if a normal call should be emitted rather than expanding the
6630 function inline. If convenient, the result should be placed in TARGET.
6631 SUBTARGET may be used as the target for computing the operand. */
6633 static rtx
6634 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6636 rtx op0, op1;
6637 tree arg;
6639 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6640 return NULL_RTX;
6642 arg = CALL_EXPR_ARG (exp, 0);
6643 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6645 arg = CALL_EXPR_ARG (exp, 1);
6646 op1 = expand_normal (arg);
6648 return expand_copysign (op0, op1, target);
6651 /* Expand a call to __builtin___clear_cache. */
6653 static rtx
6654 expand_builtin___clear_cache (tree exp)
6656 if (!targetm.code_for_clear_cache)
6658 #ifdef CLEAR_INSN_CACHE
6659 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6660 does something. Just do the default expansion to a call to
6661 __clear_cache(). */
6662 return NULL_RTX;
6663 #else
6664 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6665 does nothing. There is no need to call it. Do nothing. */
6666 return const0_rtx;
6667 #endif /* CLEAR_INSN_CACHE */
6670 /* We have a "clear_cache" insn, and it will handle everything. */
6671 tree begin, end;
6672 rtx begin_rtx, end_rtx;
6674 /* We must not expand to a library call. If we did, any
6675 fallback library function in libgcc that might contain a call to
6676 __builtin___clear_cache() would recurse infinitely. */
6677 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6679 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6680 return const0_rtx;
6683 if (targetm.have_clear_cache ())
6685 class expand_operand ops[2];
6687 begin = CALL_EXPR_ARG (exp, 0);
6688 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6690 end = CALL_EXPR_ARG (exp, 1);
6691 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6693 create_address_operand (&ops[0], begin_rtx);
6694 create_address_operand (&ops[1], end_rtx);
6695 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6696 return const0_rtx;
6698 return const0_rtx;
6701 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6703 static rtx
6704 round_trampoline_addr (rtx tramp)
6706 rtx temp, addend, mask;
6708 /* If we don't need too much alignment, we'll have been guaranteed
6709 proper alignment by get_trampoline_type. */
6710 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6711 return tramp;
6713 /* Round address up to desired boundary. */
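/* I.e. compute (tramp + align - 1) & -align, where align is
   TRAMPOLINE_ALIGNMENT in bytes, matching ADDEND and MASK below.  */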
6714 temp = gen_reg_rtx (Pmode);
6715 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6716 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6718 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6719 temp, 0, OPTAB_LIB_WIDEN);
6720 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6721 temp, 0, OPTAB_LIB_WIDEN);
6723 return tramp;
6726 static rtx
6727 expand_builtin_init_trampoline (tree exp, bool onstack)
6729 tree t_tramp, t_func, t_chain;
6730 rtx m_tramp, r_tramp, r_chain, tmp;
6732 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6733 POINTER_TYPE, VOID_TYPE))
6734 return NULL_RTX;
6736 t_tramp = CALL_EXPR_ARG (exp, 0);
6737 t_func = CALL_EXPR_ARG (exp, 1);
6738 t_chain = CALL_EXPR_ARG (exp, 2);
6740 r_tramp = expand_normal (t_tramp);
6741 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6742 MEM_NOTRAP_P (m_tramp) = 1;
6744 /* If ONSTACK, the TRAMP argument should be the address of a field
6745 within the local function's FRAME decl. Either way, let's see if
6746 we can fill in the MEM_ATTRs for this memory. */
6747 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6748 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6750 /* Creator of a heap trampoline is responsible for making sure the
6751 address is aligned to at least STACK_BOUNDARY. Normally malloc
6752 will ensure this anyhow. */
6753 tmp = round_trampoline_addr (r_tramp);
6754 if (tmp != r_tramp)
6756 m_tramp = change_address (m_tramp, BLKmode, tmp);
6757 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6758 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6761 /* The FUNC argument should be the address of the nested function.
6762 Extract the actual function decl to pass to the hook. */
6763 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6764 t_func = TREE_OPERAND (t_func, 0);
6765 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6767 r_chain = expand_normal (t_chain);
6769 /* Generate insns to initialize the trampoline. */
6770 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6772 if (onstack)
6774 trampolines_created = 1;
6776 if (targetm.calls.custom_function_descriptors != 0)
6777 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6778 "trampoline generated for nested function %qD", t_func);
6781 return const0_rtx;
6784 static rtx
6785 expand_builtin_adjust_trampoline (tree exp)
6787 rtx tramp;
6789 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6790 return NULL_RTX;
6792 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6793 tramp = round_trampoline_addr (tramp);
6794 if (targetm.calls.trampoline_adjust_address)
6795 tramp = targetm.calls.trampoline_adjust_address (tramp);
6797 return tramp;
6800 /* Expand a call to the builtin descriptor initialization routine.
6801 A descriptor is made up of a pair of pointers: one to the static
6802 chain and one to the code entry, in that order. */
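/* Since code entry points are aligned, the deliberately misaligned
   address produced for a descriptor (see the adjustment routine
   below) lets indirect call sites tell the two apart at runtime.  */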
6804 static rtx
6805 expand_builtin_init_descriptor (tree exp)
6807 tree t_descr, t_func, t_chain;
6808 rtx m_descr, r_descr, r_func, r_chain;
6810 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6811 VOID_TYPE))
6812 return NULL_RTX;
6814 t_descr = CALL_EXPR_ARG (exp, 0);
6815 t_func = CALL_EXPR_ARG (exp, 1);
6816 t_chain = CALL_EXPR_ARG (exp, 2);
6818 r_descr = expand_normal (t_descr);
6819 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6820 MEM_NOTRAP_P (m_descr) = 1;
6821 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6823 r_func = expand_normal (t_func);
6824 r_chain = expand_normal (t_chain);
6826 /* Generate insns to initialize the descriptor. */
6827 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6828 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6829 POINTER_SIZE / BITS_PER_UNIT), r_func);
6831 return const0_rtx;
6834 /* Expand a call to the builtin descriptor adjustment routine. */
6836 static rtx
6837 expand_builtin_adjust_descriptor (tree exp)
6839 rtx tramp;
6841 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6842 return NULL_RTX;
6844 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6846 /* Unalign the descriptor to allow runtime identification. */
6847 tramp = plus_constant (ptr_mode, tramp,
6848 targetm.calls.custom_function_descriptors);
6850 return force_operand (tramp, NULL_RTX);
6853 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6854 function. The function first checks whether the back end provides
6855 an insn to implement signbit for the respective mode. If not, it
6856 checks whether the floating point format of the value is such that
6857 the sign bit can be extracted. If that is not the case, error out.
6858 EXP is the expression that is a call to the builtin function; if
6859 convenient, the result should be placed in TARGET. */
6860 static rtx
6861 expand_builtin_signbit (tree exp, rtx target)
6863 const struct real_format *fmt;
6864 scalar_float_mode fmode;
6865 scalar_int_mode rmode, imode;
6866 tree arg;
6867 int word, bitpos;
6868 enum insn_code icode;
6869 rtx temp;
6870 location_t loc = EXPR_LOCATION (exp);
6872 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6873 return NULL_RTX;
6875 arg = CALL_EXPR_ARG (exp, 0);
6876 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6877 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6878 fmt = REAL_MODE_FORMAT (fmode);
6880 arg = builtin_save_expr (arg);
6882 /* Expand the argument yielding a RTX expression. */
6883 temp = expand_normal (arg);
6885 /* Check if the back end provides an insn that handles signbit for the
6886 argument's mode. */
6887 icode = optab_handler (signbit_optab, fmode);
6888 if (icode != CODE_FOR_nothing)
6890 rtx_insn *last = get_last_insn ();
6891 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6892 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6893 return target;
6894 delete_insns_since (last);
6897 /* For floating point formats without a sign bit, implement signbit
6898 as "ARG < 0.0". */
6899 bitpos = fmt->signbit_ro;
6900 if (bitpos < 0)
6902 /* But we can't do this if the format supports signed zero. */
6903 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6905 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6906 build_real (TREE_TYPE (arg), dconst0));
6907 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6910 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6912 imode = int_mode_for_mode (fmode).require ();
6913 temp = gen_lowpart (imode, temp);
6915 else
6917 imode = word_mode;
6918 /* Handle targets with different FP word orders. */
6919 if (FLOAT_WORDS_BIG_ENDIAN)
6920 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6921 else
6922 word = bitpos / BITS_PER_WORD;
6923 temp = operand_subword_force (temp, word, fmode);
6924 bitpos = bitpos % BITS_PER_WORD;
6927 /* Force the intermediate word_mode (or narrower) result into a
6928 register. This avoids attempting to create paradoxical SUBREGs
6929 of floating point modes below. */
6930 temp = force_reg (imode, temp);
6932 /* If the bitpos is within the "result mode" lowpart, the operation
6933 can be implemented with a single bitwise AND. Otherwise, we need
6934 a right shift and an AND. */
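/* E.g. for an SFmode argument with bitpos 31 and an SImode result a
   single AND with 0x80000000 suffices, whereas for DFmode with
   bitpos 63 the bit is first shifted down into bit 0.  */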
6936 if (bitpos < GET_MODE_BITSIZE (rmode))
6938 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6940 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6941 temp = gen_lowpart (rmode, temp);
6942 temp = expand_binop (rmode, and_optab, temp,
6943 immed_wide_int_const (mask, rmode),
6944 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6946 else
6948 /* Perform a logical right shift to place the signbit in the least
6949 significant bit, then truncate the result to the desired mode
6950 and mask just this bit. */
6951 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6952 temp = gen_lowpart (rmode, temp);
6953 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6954 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6957 return temp;
6960 /* Expand fork or exec calls. TARGET is the desired target of the
6961 call. EXP is the call. FN is the
6962 identifier of the actual function. IGNORE is nonzero if the
6963 value is to be ignored. */
6965 static rtx
6966 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6968 tree id, decl;
6969 tree call;
6971 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6973 tree path = CALL_EXPR_ARG (exp, 0);
6974 /* Detect unterminated path. */
6975 if (!check_read_access (exp, path))
6976 return NULL_RTX;
6978 /* Also detect unterminated first argument. */
6979 switch (DECL_FUNCTION_CODE (fn))
6981 case BUILT_IN_EXECL:
6982 case BUILT_IN_EXECLE:
6983 case BUILT_IN_EXECLP:
6984 if (!check_read_access (exp, path))
6985 return NULL_RTX;
6986 default:
6987 break;
6992 /* If we are not profiling, just call the function. */
6993 if (!profile_arc_flag)
6994 return NULL_RTX;
6996 /* Otherwise call the wrapper. This should be equivalent for the rest of
6997 the compiler, so the code does not diverge, and the wrapper may run the
6998 code necessary for keeping the profiling sane. */
7000 switch (DECL_FUNCTION_CODE (fn))
7002 case BUILT_IN_FORK:
7003 id = get_identifier ("__gcov_fork");
7004 break;
7006 case BUILT_IN_EXECL:
7007 id = get_identifier ("__gcov_execl");
7008 break;
7010 case BUILT_IN_EXECV:
7011 id = get_identifier ("__gcov_execv");
7012 break;
7014 case BUILT_IN_EXECLP:
7015 id = get_identifier ("__gcov_execlp");
7016 break;
7018 case BUILT_IN_EXECLE:
7019 id = get_identifier ("__gcov_execle");
7020 break;
7022 case BUILT_IN_EXECVP:
7023 id = get_identifier ("__gcov_execvp");
7024 break;
7026 case BUILT_IN_EXECVE:
7027 id = get_identifier ("__gcov_execve");
7028 break;
7030 default:
7031 gcc_unreachable ();
7034 decl = build_decl (DECL_SOURCE_LOCATION (fn),
7035 FUNCTION_DECL, id, TREE_TYPE (fn));
7036 DECL_EXTERNAL (decl) = 1;
7037 TREE_PUBLIC (decl) = 1;
7038 DECL_ARTIFICIAL (decl) = 1;
7039 TREE_NOTHROW (decl) = 1;
7040 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
7041 DECL_VISIBILITY_SPECIFIED (decl) = 1;
7042 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
7043 return expand_call (call, target, ignore);
7048 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
7049 the pointer in these functions is void*, the tree optimizers may remove
7050 casts. The mode computed in expand_builtin isn't reliable either, due
7051 to __sync_bool_compare_and_swap.
7053 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
7054 group of builtins. This gives us log2 of the mode size. */
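/* E.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 minus BUILT_IN_SYNC_FETCH_AND_ADD_1
   gives FCODE_DIFF == 2, and BITS_PER_UNIT << 2 == 32 bits, i.e.
   SImode on typical targets.  */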
7056 static inline machine_mode
7057 get_builtin_sync_mode (int fcode_diff)
7059 /* The size is not negotiable, so ask not to get BLKmode in return
7060 if the target indicates that a smaller size would be better. */
7061 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
7064 /* Expand the memory expression LOC and return the appropriate memory operand
7065 for the builtin_sync operations. */
7067 static rtx
7068 get_builtin_sync_mem (tree loc, machine_mode mode)
7070 rtx addr, mem;
7071 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
7072 ? TREE_TYPE (TREE_TYPE (loc))
7073 : TREE_TYPE (loc));
7074 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
7076 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
7077 addr = convert_memory_address (addr_mode, addr);
7079 /* Note that we explicitly do not want any alias information for this
7080 memory, so that we kill all other live memories. Otherwise we don't
7081 satisfy the full barrier semantics of the intrinsic. */
7082 mem = gen_rtx_MEM (mode, addr);
7084 set_mem_addr_space (mem, addr_space);
7086 mem = validize_mem (mem);
7088 /* The alignment needs to be at least that of the mode. */
7089 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
7090 get_pointer_alignment (loc)));
7091 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
7092 MEM_VOLATILE_P (mem) = 1;
7094 return mem;
7097 /* Make sure an argument is in the right mode.
7098 EXP is the tree argument.
7099 MODE is the mode it should be in. */
7101 static rtx
7102 expand_expr_force_mode (tree exp, machine_mode mode)
7104 rtx val;
7105 machine_mode old_mode;
7107 if (TREE_CODE (exp) == SSA_NAME
7108 && TYPE_MODE (TREE_TYPE (exp)) != mode)
7110 /* Undo argument promotion if possible, as combine might not
7111 be able to do it later due to MEM_VOLATILE_P uses in the
7112 patterns. */
7113 gimple *g = get_gimple_for_ssa_name (exp);
7114 if (g && gimple_assign_cast_p (g))
7116 tree rhs = gimple_assign_rhs1 (g);
7117 tree_code code = gimple_assign_rhs_code (g);
7118 if (CONVERT_EXPR_CODE_P (code)
7119 && TYPE_MODE (TREE_TYPE (rhs)) == mode
7120 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
7121 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7122 && (TYPE_PRECISION (TREE_TYPE (exp))
7123 > TYPE_PRECISION (TREE_TYPE (rhs))))
7124 exp = rhs;
7128 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
7129 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
7130 of CONST_INTs, where we know the old_mode only from the call argument. */
7132 old_mode = GET_MODE (val);
7133 if (old_mode == VOIDmode)
7134 old_mode = TYPE_MODE (TREE_TYPE (exp));
7135 val = convert_modes (mode, old_mode, val, 1);
7136 return val;
7140 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
7141 EXP is the CALL_EXPR. CODE is the rtx code
7142 that corresponds to the arithmetic or logical operation from the name;
7143 an exception here is that NOT actually means NAND. TARGET is an optional
7144 place for us to store the results; AFTER is true if this is the
7145 fetch_and_xxx form. */
7147 static rtx
7148 expand_builtin_sync_operation (machine_mode mode, tree exp,
7149 enum rtx_code code, bool after,
7150 rtx target)
7152 rtx val, mem;
7153 location_t loc = EXPR_LOCATION (exp);
7155 if (code == NOT && warn_sync_nand)
7157 tree fndecl = get_callee_fndecl (exp);
7158 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7160 static bool warned_f_a_n, warned_n_a_f;
7162 switch (fcode)
7164 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7165 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7166 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7167 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7168 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7169 if (warned_f_a_n)
7170 break;
7172 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
7173 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7174 warned_f_a_n = true;
7175 break;
7177 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7178 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7179 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7180 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7181 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7182 if (warned_n_a_f)
7183 break;
7185 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
7186 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7187 warned_n_a_f = true;
7188 break;
7190 default:
7191 gcc_unreachable ();
7195 /* Expand the operands. */
7196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7197 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7199 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
7200 after);
7203 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
7204 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
7205 true if this is the boolean form. TARGET is a place for us to store the
7206 results; this is NOT optional if IS_BOOL is true. */
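/* Illustrative usage (not from this file):

     int  old = __sync_val_compare_and_swap (&v, expected, desired);
     bool ok  = __sync_bool_compare_and_swap (&v, expected, desired);

   Both forms funnel through here; IS_BOOL selects whether the success
   flag or the old value is handed back.  */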
7208 static rtx
7209 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
7210 bool is_bool, rtx target)
7212 rtx old_val, new_val, mem;
7213 rtx *pbool, *poval;
7215 /* Expand the operands. */
7216 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7217 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7218 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7220 pbool = poval = NULL;
7221 if (target != const0_rtx)
7223 if (is_bool)
7224 pbool = &target;
7225 else
7226 poval = &target;
7228 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
7229 false, MEMMODEL_SYNC_SEQ_CST,
7230 MEMMODEL_SYNC_SEQ_CST))
7231 return NULL_RTX;
7233 return target;
7236 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
7237 general form is actually an atomic exchange, and some targets only
7238 support a reduced form with the second argument being a constant 1.
7239 EXP is the CALL_EXPR; TARGET is an optional place for us to store
7240 the results. */
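/* Illustrative usage (not from this file): the classic spinlock acquire

     while (__sync_lock_test_and_set (&lock, 1))
       ;

   needs only the reduced form; a general exchange value may instead be
   implemented by the target as a compare-and-swap loop.  */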
7242 static rtx
7243 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
7244 rtx target)
7246 rtx val, mem;
7248 /* Expand the operands. */
7249 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7250 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7252 return expand_sync_lock_test_and_set (target, mem, val);
7255 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
7257 static void
7258 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
7260 rtx mem;
7262 /* Expand the operands. */
7263 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7265 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
7268 /* Given an integer representing an ``enum memmodel'', verify its
7269 correctness and return the memory model enum. */
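/* Illustrative mapping (constants assumed from memmodel.h): the
   C11-style values __ATOMIC_RELAXED (0) through __ATOMIC_SEQ_CST (5)
   arrive here as an integer argument, so e.g.

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   reaches this function with the constant 1, which the PR 59448
   workaround below promotes to MEMMODEL_ACQUIRE.  */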
7271 static enum memmodel
7272 get_memmodel (tree exp)
7274 rtx op;
7275 unsigned HOST_WIDE_INT val;
7276 location_t loc
7277 = expansion_point_location_if_in_system_header (input_location);
7279 /* If the parameter is not a constant, it's a run-time value, so we'll just
7280 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
7281 if (TREE_CODE (exp) != INTEGER_CST)
7282 return MEMMODEL_SEQ_CST;
7284 op = expand_normal (exp);
7286 val = INTVAL (op);
7287 if (targetm.memmodel_check)
7288 val = targetm.memmodel_check (val);
7289 else if (val & ~MEMMODEL_MASK)
7291 warning_at (loc, OPT_Winvalid_memory_model,
7292 "unknown architecture specifier in memory model to builtin");
7293 return MEMMODEL_SEQ_CST;
7296 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
7297 if (memmodel_base (val) >= MEMMODEL_LAST)
7299 warning_at (loc, OPT_Winvalid_memory_model,
7300 "invalid memory model argument to builtin");
7301 return MEMMODEL_SEQ_CST;
7304 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
7305 be conservative and promote consume to acquire. */
7306 if (val == MEMMODEL_CONSUME)
7307 val = MEMMODEL_ACQUIRE;
7309 return (enum memmodel) val;
7312 /* Expand the __atomic_exchange intrinsic:
7313 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
7314 EXP is the CALL_EXPR.
7315 TARGET is an optional place for us to store the results. */
7317 static rtx
7318 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
7320 rtx val, mem;
7321 enum memmodel model;
7323 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7325 if (!flag_inline_atomics)
7326 return NULL_RTX;
7328 /* Expand the operands. */
7329 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7330 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7332 return expand_atomic_exchange (target, mem, val, model);
7335 /* Expand the __atomic_compare_exchange intrinsic:
7336 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
7337 TYPE desired, BOOL weak,
7338 enum memmodel success,
7339 enum memmodel failure)
7340 EXP is the CALL_EXPR.
7341 TARGET is an optional place for us to store the results. */
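/* Illustrative usage (not from this file):

     bool ok = __atomic_compare_exchange_n (&v, &expected, desired,
					    false, __ATOMIC_SEQ_CST,
					    __ATOMIC_SEQ_CST);

   On failure, the value actually read from the object is stored back
   into EXPECTED by the conditional store emitted near the end below.  */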
7343 static rtx
7344 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
7345 rtx target)
7347 rtx expect, desired, mem, oldval;
7348 rtx_code_label *label;
7349 enum memmodel success, failure;
7350 tree weak;
7351 bool is_weak;
7352 location_t loc
7353 = expansion_point_location_if_in_system_header (input_location);
7355 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
7356 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
7358 if (failure > success)
7360 warning_at (loc, OPT_Winvalid_memory_model,
7361 "failure memory model cannot be stronger than success "
7362 "memory model for %<__atomic_compare_exchange%>");
7363 success = MEMMODEL_SEQ_CST;
7366 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7368 warning_at (loc, OPT_Winvalid_memory_model,
7369 "invalid failure memory model for "
7370 "%<__atomic_compare_exchange%>");
7371 failure = MEMMODEL_SEQ_CST;
7372 success = MEMMODEL_SEQ_CST;
7376 if (!flag_inline_atomics)
7377 return NULL_RTX;
7379 /* Expand the operands. */
7380 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7382 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
7383 expect = convert_memory_address (Pmode, expect);
7384 expect = gen_rtx_MEM (mode, expect);
7385 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7387 weak = CALL_EXPR_ARG (exp, 3);
7388 is_weak = false;
7389 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
7390 is_weak = true;
7392 if (target == const0_rtx)
7393 target = NULL;
7395 /* Lest the rtl backend create a race condition with an improper store
7396 to memory, always create a new pseudo for OLDVAL. */
7397 oldval = NULL;
7399 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
7400 is_weak, success, failure))
7401 return NULL_RTX;
7403 /* Conditionally store back to EXPECT, lest we create a race condition
7404 with an improper store to memory. */
7405 /* ??? With a rearrangement of atomics at the gimple level, we can handle
7406 the normal case where EXPECT is totally private, i.e. a register. At
7407 which point the store can be unconditional. */
7408 label = gen_label_rtx ();
7409 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
7410 GET_MODE (target), 1, label);
7411 emit_move_insn (expect, oldval);
7412 emit_label (label);
7414 return target;
7417 /* Helper function for expand_ifn_atomic_compare_exchange - expand
7418 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
7419 call. The weak parameter must be dropped to match the expected parameter
7420 list and the EXPECTED argument changed from a value to a pointer to a memory
7421 slot. */
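/* E.g. (illustrative; the temporary's name is made up) the internal call

     lhs = ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, 4 | weak << 8,
				    success, failure);

   is rewritten as

     __atomic_compare_exchange_4 (ptr, &D.tmp, desired, success, failure)

   with EXPECTED first copied into the stack slot D.tmp.  */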
7423 static void
7424 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
7426 unsigned int z;
7427 vec<tree, va_gc> *vec;
7429 vec_alloc (vec, 5);
7430 vec->quick_push (gimple_call_arg (call, 0));
7431 tree expected = gimple_call_arg (call, 1);
7432 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
7433 TREE_TYPE (expected));
7434 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
7435 if (expd != x)
7436 emit_move_insn (x, expd);
7437 tree v = make_tree (TREE_TYPE (expected), x);
7438 vec->quick_push (build1 (ADDR_EXPR,
7439 build_pointer_type (TREE_TYPE (expected)), v));
7440 vec->quick_push (gimple_call_arg (call, 2));
7441 /* Skip the boolean weak parameter. */
7442 for (z = 4; z < 6; z++)
7443 vec->quick_push (gimple_call_arg (call, z));
7444 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7445 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
7446 gcc_assert (bytes_log2 < 5);
7447 built_in_function fncode
7448 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7449 + bytes_log2);
7450 tree fndecl = builtin_decl_explicit (fncode);
7451 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
7452 fndecl);
7453 tree exp = build_call_vec (boolean_type_node, fn, vec);
7454 tree lhs = gimple_call_lhs (call);
7455 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7456 if (lhs)
7458 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7459 if (GET_MODE (boolret) != mode)
7460 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7461 x = force_reg (mode, x);
7462 write_complex_part (target, boolret, true);
7463 write_complex_part (target, x, false);
7467 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7469 void
7470 expand_ifn_atomic_compare_exchange (gcall *call)
7472 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7473 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
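  /* Argument 3 packs the access size into the low byte and the weak flag
     into bit 8; e.g. a weak 4-byte compare-exchange passes 4 | 256 == 260
     (see the "& 256" test below).  */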
7474 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7475 rtx expect, desired, mem, oldval, boolret;
7476 enum memmodel success, failure;
7477 tree lhs;
7478 bool is_weak;
7479 location_t loc
7480 = expansion_point_location_if_in_system_header (gimple_location (call));
7482 success = get_memmodel (gimple_call_arg (call, 4));
7483 failure = get_memmodel (gimple_call_arg (call, 5));
7485 if (failure > success)
7487 warning_at (loc, OPT_Winvalid_memory_model,
7488 "failure memory model cannot be stronger than success "
7489 "memory model for %<__atomic_compare_exchange%>");
7490 success = MEMMODEL_SEQ_CST;
7493 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7495 warning_at (loc, OPT_Winvalid_memory_model,
7496 "invalid failure memory model for "
7497 "%<__atomic_compare_exchange%>");
7498 failure = MEMMODEL_SEQ_CST;
7499 success = MEMMODEL_SEQ_CST;
7502 if (!flag_inline_atomics)
7504 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7505 return;
7508 /* Expand the operands. */
7509 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7511 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7512 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7514 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7516 boolret = NULL;
7517 oldval = NULL;
7519 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7520 is_weak, success, failure))
7522 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7523 return;
7526 lhs = gimple_call_lhs (call);
7527 if (lhs)
7529 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7530 if (GET_MODE (boolret) != mode)
7531 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7532 write_complex_part (target, boolret, true);
7533 write_complex_part (target, oldval, false);
7537 /* Expand the __atomic_load intrinsic:
7538 TYPE __atomic_load (TYPE *object, enum memmodel)
7539 EXP is the CALL_EXPR.
7540 TARGET is an optional place for us to store the results. */
7542 static rtx
7543 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7545 rtx mem;
7546 enum memmodel model;
7548 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7549 if (is_mm_release (model) || is_mm_acq_rel (model))
7551 location_t loc
7552 = expansion_point_location_if_in_system_header (input_location);
7553 warning_at (loc, OPT_Winvalid_memory_model,
7554 "invalid memory model for %<__atomic_load%>");
7555 model = MEMMODEL_SEQ_CST;
7558 if (!flag_inline_atomics)
7559 return NULL_RTX;
7561 /* Expand the operand. */
7562 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7564 return expand_atomic_load (target, mem, model);
7568 /* Expand the __atomic_store intrinsic:
7569 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7570 EXP is the CALL_EXPR.
7571 TARGET is an optional place for us to store the results. */
7573 static rtx
7574 expand_builtin_atomic_store (machine_mode mode, tree exp)
7576 rtx mem, val;
7577 enum memmodel model;
7579 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7580 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7581 || is_mm_release (model)))
7583 location_t loc
7584 = expansion_point_location_if_in_system_header (input_location);
7585 warning_at (loc, OPT_Winvalid_memory_model,
7586 "invalid memory model for %<__atomic_store%>");
7587 model = MEMMODEL_SEQ_CST;
7590 if (!flag_inline_atomics)
7591 return NULL_RTX;
7593 /* Expand the operands. */
7594 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7595 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7597 return expand_atomic_store (mem, val, model, false);
7600 /* Expand the __atomic_fetch_XXX intrinsic:
7601 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7602 EXP is the CALL_EXPR.
7603 TARGET is an optional place for us to store the results.
7604 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
7605 FETCH_AFTER is true if returning the result of the operation.
7606 FETCH_AFTER is false if returning the value before the operation.
7607 IGNORE is true if the result is not used.
7608 EXT_CALL is the correct builtin for an external call if this cannot be
7609 resolved to an instruction sequence. */
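/* Illustrative fallback (mapping assumed from the caller in
   expand_builtin): if __atomic_add_fetch_4 cannot be inlined, EXT_CALL
   names BUILT_IN_ATOMIC_FETCH_ADD_4.  The library call returns the value
   before the addition, so the correction code at the bottom of this
   function re-applies PLUS to deliver the add_fetch result.  */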
7611 static rtx
7612 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7613 enum rtx_code code, bool fetch_after,
7614 bool ignore, enum built_in_function ext_call)
7616 rtx val, mem, ret;
7617 enum memmodel model;
7618 tree fndecl;
7619 tree addr;
7621 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7623 /* Expand the operands. */
7624 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7625 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7627 /* Only try generating instructions if inlining is turned on. */
7628 if (flag_inline_atomics)
7630 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7631 if (ret)
7632 return ret;
7635 /* Return if a different routine isn't needed for the library call. */
7636 if (ext_call == BUILT_IN_NONE)
7637 return NULL_RTX;
7639 /* Change the call to the specified function. */
7640 fndecl = get_callee_fndecl (exp);
7641 addr = CALL_EXPR_FN (exp);
7642 STRIP_NOPS (addr);
7644 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7645 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7647 /* If we will emit code after the call, the call cannot be a tail call.
7648 If it is emitted as a tail call, a barrier is emitted after it, and
7649 then all trailing code is removed. */
7650 if (!ignore)
7651 CALL_EXPR_TAILCALL (exp) = 0;
7653 /* Expand the call here so we can emit trailing code. */
7654 ret = expand_call (exp, target, ignore);
7656 /* Replace the original function just in case it matters. */
7657 TREE_OPERAND (addr, 0) = fndecl;
7659 /* Then issue the arithmetic correction to return the right result. */
7660 if (!ignore)
7662 if (code == NOT)
7664 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7665 OPTAB_LIB_WIDEN);
7666 ret = expand_simple_unop (mode, NOT, ret, target, true);
7668 else
7669 ret = expand_simple_binop (mode, code, ret, val, target, true,
7670 OPTAB_LIB_WIDEN);
7672 return ret;
7675 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
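/* These internal calls are created (in the tree-ssa-ccp.c
   optimize_atomic_bit_test_and pass) for patterns such as the
   illustrative

     if (__atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST)
	 & (1 << bit))

   where only the affected bit of the result is tested, letting targets
   emit e.g. a lock bts instruction instead of a full fetch_or.  */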
7677 void
7678 expand_ifn_atomic_bit_test_and (gcall *call)
7680 tree ptr = gimple_call_arg (call, 0);
7681 tree bit = gimple_call_arg (call, 1);
7682 tree flag = gimple_call_arg (call, 2);
7683 tree lhs = gimple_call_lhs (call);
7684 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7685 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7686 enum rtx_code code;
7687 optab optab;
7688 class expand_operand ops[5];
7690 gcc_assert (flag_inline_atomics);
7692 if (gimple_call_num_args (call) == 4)
7693 model = get_memmodel (gimple_call_arg (call, 3));
7695 rtx mem = get_builtin_sync_mem (ptr, mode);
7696 rtx val = expand_expr_force_mode (bit, mode);
7698 switch (gimple_call_internal_fn (call))
7700 case IFN_ATOMIC_BIT_TEST_AND_SET:
7701 code = IOR;
7702 optab = atomic_bit_test_and_set_optab;
7703 break;
7704 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7705 code = XOR;
7706 optab = atomic_bit_test_and_complement_optab;
7707 break;
7708 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7709 code = AND;
7710 optab = atomic_bit_test_and_reset_optab;
7711 break;
7712 default:
7713 gcc_unreachable ();
7716 if (lhs == NULL_TREE)
7718 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7719 val, NULL_RTX, true, OPTAB_DIRECT);
7720 if (code == AND)
7721 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7722 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7723 return;
7726 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7727 enum insn_code icode = direct_optab_handler (optab, mode);
7728 gcc_assert (icode != CODE_FOR_nothing);
7729 create_output_operand (&ops[0], target, mode);
7730 create_fixed_operand (&ops[1], mem);
7731 create_convert_operand_to (&ops[2], val, mode, true);
7732 create_integer_operand (&ops[3], model);
7733 create_integer_operand (&ops[4], integer_onep (flag));
7734 if (maybe_expand_insn (icode, 5, ops))
7735 return;
7737 rtx bitval = val;
7738 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7739 val, NULL_RTX, true, OPTAB_DIRECT);
7740 rtx maskval = val;
7741 if (code == AND)
7742 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7743 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7744 code, model, false);
7745 if (integer_onep (flag))
7747 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7748 NULL_RTX, true, OPTAB_DIRECT);
7749 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7750 true, OPTAB_DIRECT);
7752 else
7753 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7754 OPTAB_DIRECT);
7755 if (result != target)
7756 emit_move_insn (target, result);
7759 /* Expand an atomic clear operation.
7760 void __atomic_clear (BOOL *obj, enum memmodel)
7761 EXP is the call expression. */
7763 static rtx
7764 expand_builtin_atomic_clear (tree exp)
7766 machine_mode mode;
7767 rtx mem, ret;
7768 enum memmodel model;
7770 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7771 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7772 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7774 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7776 location_t loc
7777 = expansion_point_location_if_in_system_header (input_location);
7778 warning_at (loc, OPT_Winvalid_memory_model,
7779 "invalid memory model for %<__atomic_store%>");
7780 model = MEMMODEL_SEQ_CST;
7783 /* Try issuing an __atomic_store of zero, allowing a fallback to a
7784 __sync_lock_release. The only way this can
7785 fail is if the bool type is larger than a word size. Unlikely, but
7786 handle it anyway for completeness. Assume a single threaded model since
7787 there is no atomic support in this case, and no barriers are required. */
7788 ret = expand_atomic_store (mem, const0_rtx, model, true);
7789 if (!ret)
7790 emit_move_insn (mem, const0_rtx);
7791 return const0_rtx;
7794 /* Expand an atomic test_and_set operation.
7795 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7796 EXP is the call expression. */
7798 static rtx
7799 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7801 rtx mem;
7802 enum memmodel model;
7803 machine_mode mode;
7805 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7806 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7807 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7809 return expand_atomic_test_and_set (target, mem, model);
7813 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7814 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
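/* For example (illustrative): on a typical 32-bit or 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here, provided an SImode compare-and-swap pattern and an
   SImode atomic load exist and int is not under-aligned.  */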
7816 static tree
7817 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7819 int size;
7820 machine_mode mode;
7821 unsigned int mode_align, type_align;
7823 if (TREE_CODE (arg0) != INTEGER_CST)
7824 return NULL_TREE;
7826 /* We need a corresponding integer mode for the access to be lock-free. */
7827 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7828 if (!int_mode_for_size (size, 0).exists (&mode))
7829 return boolean_false_node;
7831 mode_align = GET_MODE_ALIGNMENT (mode);
7833 if (TREE_CODE (arg1) == INTEGER_CST)
7835 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7837 /* Either this argument is null, or it's a fake pointer encoding
7838 the alignment of the object. */
7839 val = least_bit_hwi (val);
7840 val *= BITS_PER_UNIT;
7842 if (val == 0 || mode_align < val)
7843 type_align = mode_align;
7844 else
7845 type_align = val;
7847 else
7849 tree ttype = TREE_TYPE (arg1);
7851 /* This function is usually invoked and folded immediately by the front
7852 end before anything else has a chance to look at it. The pointer
7853 parameter at this point is usually cast to a void *, so check for that
7854 and look past the cast. */
7855 if (CONVERT_EXPR_P (arg1)
7856 && POINTER_TYPE_P (ttype)
7857 && VOID_TYPE_P (TREE_TYPE (ttype))
7858 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7859 arg1 = TREE_OPERAND (arg1, 0);
7861 ttype = TREE_TYPE (arg1);
7862 gcc_assert (POINTER_TYPE_P (ttype));
7864 /* Get the underlying type of the object. */
7865 ttype = TREE_TYPE (ttype);
7866 type_align = TYPE_ALIGN (ttype);
7869 /* If the object has smaller alignment, the lock free routines cannot
7870 be used. */
7871 if (type_align < mode_align)
7872 return boolean_false_node;
7874 /* Check if a compare_and_swap pattern exists for the mode which represents
7875 the required size. The pattern is not allowed to fail, so the existence
7876 of the pattern indicates support is present. Also require that an
7877 atomic load exists for the required size. */
7878 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7879 return boolean_true_node;
7880 else
7881 return boolean_false_node;
7884 /* Return true if the parameters to call EXP represent an object which will
7885 always generate lock free instructions. The first argument represents the
7886 size of the object, and the second parameter is a pointer to the object
7887 itself. If NULL is passed for the object, then the result is based on
7888 typical alignment for an object of the specified size. Otherwise return
7889 false. */
7891 static rtx
7892 expand_builtin_atomic_always_lock_free (tree exp)
7894 tree size;
7895 tree arg0 = CALL_EXPR_ARG (exp, 0);
7896 tree arg1 = CALL_EXPR_ARG (exp, 1);
7898 if (TREE_CODE (arg0) != INTEGER_CST)
7900 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7901 return const0_rtx;
7904 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7905 if (size == boolean_true_node)
7906 return const1_rtx;
7907 return const0_rtx;
7910 /* Return boolean_true_node if it can be determined that object ARG1 of
7911 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
7913 static tree
7914 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7916 if (!flag_inline_atomics)
7917 return NULL_TREE;
7919 /* If it isn't always lock free, don't generate a result. */
7920 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7921 return boolean_true_node;
7923 return NULL_TREE;
7926 /* Return true if the parameters to call EXP represent an object which will
7927 always generate lock free instructions. The first argument represents the
7928 size of the object, and the second parameter is a pointer to the object
7929 itself. If NULL is passed for the object, then the result is based on
7930 typical alignment for an object of the specified size. Otherwise return
7931 NULL.  */
7933 static rtx
7934 expand_builtin_atomic_is_lock_free (tree exp)
7936 tree size;
7937 tree arg0 = CALL_EXPR_ARG (exp, 0);
7938 tree arg1 = CALL_EXPR_ARG (exp, 1);
7940 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7942 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7943 return NULL_RTX;
7946 if (!flag_inline_atomics)
7947 return NULL_RTX;
7949 /* If the value is known at compile time, return the RTX for it. */
7950 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7951 if (size == boolean_true_node)
7952 return const1_rtx;
7954 return NULL_RTX;
7957 /* Expand the __atomic_thread_fence intrinsic:
7958 void __atomic_thread_fence (enum memmodel)
7959 EXP is the CALL_EXPR. */
7961 static void
7962 expand_builtin_atomic_thread_fence (tree exp)
7964 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7965 expand_mem_thread_fence (model);
7968 /* Expand the __atomic_signal_fence intrinsic:
7969 void __atomic_signal_fence (enum memmodel)
7970 EXP is the CALL_EXPR. */
7972 static void
7973 expand_builtin_atomic_signal_fence (tree exp)
7975 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7976 expand_mem_signal_fence (model);
7979 /* Expand the __sync_synchronize intrinsic. */
7981 static void
7982 expand_builtin_sync_synchronize (void)
7984 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7987 static rtx
7988 expand_builtin_thread_pointer (tree exp, rtx target)
7990 enum insn_code icode;
7991 if (!validate_arglist (exp, VOID_TYPE))
7992 return const0_rtx;
7993 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7994 if (icode != CODE_FOR_nothing)
7996 class expand_operand op;
7997 /* If the target is not suitable then create a new target. */
7998 if (target == NULL_RTX
7999 || !REG_P (target)
8000 || GET_MODE (target) != Pmode)
8001 target = gen_reg_rtx (Pmode);
8002 create_output_operand (&op, target, Pmode);
8003 expand_insn (icode, 1, &op);
8004 return target;
8006 error ("%<__builtin_thread_pointer%> is not supported on this target");
8007 return const0_rtx;
8010 static void
8011 expand_builtin_set_thread_pointer (tree exp)
8013 enum insn_code icode;
8014 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8015 return;
8016 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
8017 if (icode != CODE_FOR_nothing)
8019 class expand_operand op;
8020 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
8021 Pmode, EXPAND_NORMAL);
8022 create_input_operand (&op, val, Pmode);
8023 expand_insn (icode, 1, &op);
8024 return;
8026 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
8032 /* Emit code to restore the current value of the stack. */
8032 static void
8033 expand_stack_restore (tree var)
8035 rtx_insn *prev;
8036 rtx sa = expand_normal (var);
8038 sa = convert_memory_address (Pmode, sa);
8040 prev = get_last_insn ();
8041 emit_stack_restore (SAVE_BLOCK, sa);
8043 record_new_stack_level ();
8045 fixup_args_size_notes (prev, get_last_insn (), 0);
8048 /* Emit code to save the current value of the stack. */
8050 static rtx
8051 expand_stack_save (void)
8053 rtx ret = NULL_RTX;
8055 emit_stack_save (SAVE_BLOCK, &ret);
8056 return ret;
8059 /* Emit code to get the OpenACC gang, worker or vector id or size. */
8061 static rtx
8062 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
8064 const char *name;
8065 rtx fallback_retval;
8066 rtx_insn *(*gen_fn) (rtx, rtx);
8067 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
8069 case BUILT_IN_GOACC_PARLEVEL_ID:
8070 name = "__builtin_goacc_parlevel_id";
8071 fallback_retval = const0_rtx;
8072 gen_fn = targetm.gen_oacc_dim_pos;
8073 break;
8074 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8075 name = "__builtin_goacc_parlevel_size";
8076 fallback_retval = const1_rtx;
8077 gen_fn = targetm.gen_oacc_dim_size;
8078 break;
8079 default:
8080 gcc_unreachable ();
8083 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
8085 error ("%qs only supported in OpenACC code", name);
8086 return const0_rtx;
8089 tree arg = CALL_EXPR_ARG (exp, 0);
8090 if (TREE_CODE (arg) != INTEGER_CST)
8092 error ("non-constant argument 0 to %qs", name);
8093 return const0_rtx;
8096 int dim = TREE_INT_CST_LOW (arg);
8097 switch (dim)
8099 case GOMP_DIM_GANG:
8100 case GOMP_DIM_WORKER:
8101 case GOMP_DIM_VECTOR:
8102 break;
8103 default:
8104 error ("illegal argument 0 to %qs", name);
8105 return const0_rtx;
8108 if (ignore)
8109 return target;
8111 if (target == NULL_RTX)
8112 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8114 if (!targetm.have_oacc_dim_size ())
8116 emit_move_insn (target, fallback_retval);
8117 return target;
8120 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
8121 emit_insn (gen_fn (reg, GEN_INT (dim)));
8122 if (reg != target)
8123 emit_move_insn (target, reg);
8125 return target;
8128 /* Expand a string compare operation using a sequence of char comparisons
8129 to get rid of the calling overhead, with result going to TARGET if
8130 that's convenient.
8132 VAR_STR is the variable string source;
8133 CONST_STR is the constant string source;
8134 LENGTH is the number of chars to compare;
8135 CONST_STR_N indicates which source string is the constant string;
8136 MODE is the mode of the comparison result.
8138 The expansion amounts to (assume const_str_n is 2, i.e., arg2 is a constant string):
8140 target = (int) (unsigned char) var_str[0]
8141 - (int) (unsigned char) const_str[0];
8142 if (target != 0)
8143 goto ne_label;
8145 target = (int) (unsigned char) var_str[length - 2]
8146 - (int) (unsigned char) const_str[length - 2];
8147 if (target != 0)
8148 goto ne_label;
8149 target = (int) (unsigned char) var_str[length - 1]
8150 - (int) (unsigned char) const_str[length - 1];
8151 ne_label:
8154 static rtx
8155 inline_string_cmp (rtx target, tree var_str, const char *const_str,
8156 unsigned HOST_WIDE_INT length,
8157 int const_str_n, machine_mode mode)
8159 HOST_WIDE_INT offset = 0;
8160 rtx var_rtx_array
8161 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
8162 rtx var_rtx = NULL_RTX;
8163 rtx const_rtx = NULL_RTX;
8164 rtx result = target ? target : gen_reg_rtx (mode);
8165 rtx_code_label *ne_label = gen_label_rtx ();
8166 tree unit_type_node = unsigned_char_type_node;
8167 scalar_int_mode unit_mode
8168 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
8170 start_sequence ();
8172 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
8174 var_rtx
8175 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
8176 const_rtx = c_readstr (const_str + offset, unit_mode);
8177 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
8178 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
8180 op0 = convert_modes (mode, unit_mode, op0, 1);
8181 op1 = convert_modes (mode, unit_mode, op1, 1);
8182 result = expand_simple_binop (mode, MINUS, op0, op1,
8183 result, 1, OPTAB_WIDEN);
8184 if (i < length - 1)
8185 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
8186 mode, true, ne_label);
8187 offset += GET_MODE_SIZE (unit_mode);
8190 emit_label (ne_label);
8191 rtx_insn *insns = get_insns ();
8192 end_sequence ();
8193 emit_insn (insns);
8195 return result;
8198 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
8199 to TARGET if that's convenient.
8200 If the call cannot be inlined, return NULL_RTX. */
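/* E.g. (illustrative) at -O2 the call

     strcmp (s, "ab")

   can be expanded by inline_string_cmp above into byte subtractions with
   a bound of 3 (two characters plus the terminating nul), provided that
   bound does not exceed param_builtin_string_cmp_inline_length.  */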
8202 static rtx
8203 inline_expand_builtin_bytecmp (tree exp, rtx target)
8205 tree fndecl = get_callee_fndecl (exp);
8206 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8207 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
8209 /* Do NOT apply this inlining expansion when optimizing for size or
8210 at optimization levels below 2. */
8211 if (optimize < 2 || optimize_insn_for_size_p ())
8212 return NULL_RTX;
8214 gcc_checking_assert (fcode == BUILT_IN_STRCMP
8215 || fcode == BUILT_IN_STRNCMP
8216 || fcode == BUILT_IN_MEMCMP);
8218 /* On a target where the type of the call (int) has the same or narrower precision
8219 than unsigned char, give up the inlining expansion. */
8220 if (TYPE_PRECISION (unsigned_char_type_node)
8221 >= TYPE_PRECISION (TREE_TYPE (exp)))
8222 return NULL_RTX;
8224 tree arg1 = CALL_EXPR_ARG (exp, 0);
8225 tree arg2 = CALL_EXPR_ARG (exp, 1);
8226 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
8228 unsigned HOST_WIDE_INT len1 = 0;
8229 unsigned HOST_WIDE_INT len2 = 0;
8230 unsigned HOST_WIDE_INT len3 = 0;
8232 /* Get the object representation of the initializers of ARG1 and ARG2
8233 as strings, provided they refer to constant objects, with their byte
8234 sizes in LEN1 and LEN2, respectively. */
8235 const char *bytes1 = getbyterep (arg1, &len1);
8236 const char *bytes2 = getbyterep (arg2, &len2);
8238 /* Fail if neither argument refers to an initialized constant. */
8239 if (!bytes1 && !bytes2)
8240 return NULL_RTX;
8242 if (is_ncmp)
8244 /* Fail if the memcmp/strncmp bound is not a constant. */
8245 if (!tree_fits_uhwi_p (len3_tree))
8246 return NULL_RTX;
8248 len3 = tree_to_uhwi (len3_tree);
8250 if (fcode == BUILT_IN_MEMCMP)
8252 /* Fail if the memcmp bound is greater than the size of either
8253 of the two constant objects. */
8254 if ((bytes1 && len1 < len3)
8255 || (bytes2 && len2 < len3))
8256 return NULL_RTX;
8260 if (fcode != BUILT_IN_MEMCMP)
8262 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
8263 and LEN2 to the length of the nul-terminated string stored
8264 in each. */
8265 if (bytes1 != NULL)
8266 len1 = strnlen (bytes1, len1) + 1;
8267 if (bytes2 != NULL)
8268 len2 = strnlen (bytes2, len2) + 1;
8271 /* See inline_string_cmp. */
8272 int const_str_n;
8273 if (!len1)
8274 const_str_n = 2;
8275 else if (!len2)
8276 const_str_n = 1;
8277 else if (len2 > len1)
8278 const_str_n = 1;
8279 else
8280 const_str_n = 2;
8282 /* For strncmp only, compute the new bound as the smallest of
8283 the lengths of the two strings (plus 1) and the bound provided
8284 to the function. */
8285 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
8286 if (is_ncmp && len3 < bound)
8287 bound = len3;
8289 /* If the bound of the comparison is larger than the threshold,
8290 do nothing. */
8291 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
8292 return NULL_RTX;
8294 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8296 /* Now expand the call inline. */
8297 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
8298 (const_str_n == 1) ? bytes1 : bytes2, bound,
8299 const_str_n, mode);
8302 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
8303 represents the size of the first argument to that call, or VOIDmode
8304 if the argument is a pointer. IGNORE will be true if the result
8305 isn't used. */
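/* Illustrative usage (not from this file): to inhibit speculative
   execution past a bounds check one writes

     if (i < len)
       val = __builtin_speculation_safe_value (array[i]);

   and the speculation_safe_value target hook emits a suitable barrier
   sequence, using 0 as the failsafe value since no second argument was
   given.  */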
8306 static rtx
8307 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
8308 bool ignore)
8310 rtx val, failsafe;
8311 unsigned nargs = call_expr_nargs (exp);
8313 tree arg0 = CALL_EXPR_ARG (exp, 0);
8315 if (mode == VOIDmode)
8317 mode = TYPE_MODE (TREE_TYPE (arg0));
8318 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
8321 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
8323 /* An optional second argument can be used as a failsafe value on
8324 some machines. If it isn't present, then the failsafe value is
8325 assumed to be 0. */
8326 if (nargs > 1)
8328 tree arg1 = CALL_EXPR_ARG (exp, 1);
8329 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
8331 else
8332 failsafe = const0_rtx;
8334 /* If the result isn't used, the behavior is undefined. It would be
8335 nice to emit a warning here, but path splitting means this might
8336 happen with legitimate code. So simply drop the builtin
8337 expansion in that case; we've handled any side-effects above. */
8338 if (ignore)
8339 return const0_rtx;
8341 /* If we don't have a suitable target, create one to hold the result. */
8342 if (target == NULL || GET_MODE (target) != mode)
8343 target = gen_reg_rtx (mode);
8345 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
8346 val = convert_modes (mode, VOIDmode, val, false);
8348 return targetm.speculation_safe_value (mode, target, val, failsafe);
8351 /* Expand an expression EXP that calls a built-in function,
8352 with result going to TARGET if that's convenient
8353 (and in mode MODE if that's convenient).
8354 SUBTARGET may be used as the target for computing one of EXP's operands.
8355 IGNORE is nonzero if the value is to be ignored. */
8357 rtx
8358 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
8359 int ignore)
8361 tree fndecl = get_callee_fndecl (exp);
8362 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
8363 int flags;
8365 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8366 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
8368 /* When ASan is enabled, we don't want to expand some memory/string
8369 builtins and rely on libsanitizer's hooks. This allows us to avoid
8370 redundant checks and be sure, that possible overflow will be detected
8371 by ASan. */
8373 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8374 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
8375 return expand_call (exp, target, ignore);
8377 /* When not optimizing, generate calls to library functions for a certain
8378 set of builtins. */
8379 if (!optimize
8380 && !called_as_built_in (fndecl)
8381 && fcode != BUILT_IN_FORK
8382 && fcode != BUILT_IN_EXECL
8383 && fcode != BUILT_IN_EXECV
8384 && fcode != BUILT_IN_EXECLP
8385 && fcode != BUILT_IN_EXECLE
8386 && fcode != BUILT_IN_EXECVP
8387 && fcode != BUILT_IN_EXECVE
8388 && !ALLOCA_FUNCTION_CODE_P (fcode)
8389 && fcode != BUILT_IN_FREE)
8390 return expand_call (exp, target, ignore);
8392 /* The built-in function expanders test for target == const0_rtx
8393 to determine whether the function's result will be ignored. */
8394 if (ignore)
8395 target = const0_rtx;
8397 /* If the result of a pure or const built-in function is ignored, and
8398 none of its arguments are volatile, we can avoid expanding the
8399 built-in call and just evaluate the arguments for side-effects. */
8400 if (target == const0_rtx
8401 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
8402 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8404 bool volatilep = false;
8405 tree arg;
8406 call_expr_arg_iterator iter;
8408 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8409 if (TREE_THIS_VOLATILE (arg))
8411 volatilep = true;
8412 break;
8415 if (! volatilep)
8417 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8418 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8419 return const0_rtx;
8423 switch (fcode)
8425 CASE_FLT_FN (BUILT_IN_FABS):
8426 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8427 case BUILT_IN_FABSD32:
8428 case BUILT_IN_FABSD64:
8429 case BUILT_IN_FABSD128:
8430 target = expand_builtin_fabs (exp, target, subtarget);
8431 if (target)
8432 return target;
8433 break;
8435 CASE_FLT_FN (BUILT_IN_COPYSIGN):
8436 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
8437 target = expand_builtin_copysign (exp, target, subtarget);
8438 if (target)
8439 return target;
8440 break;
8442 /* Just do a normal library call if we were unable to fold
8443 the values. */
8444 CASE_FLT_FN (BUILT_IN_CABS):
8445 break;
8447 CASE_FLT_FN (BUILT_IN_FMA):
8448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
8449 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
8450 if (target)
8451 return target;
8452 break;
8454 CASE_FLT_FN (BUILT_IN_ILOGB):
8455 if (! flag_unsafe_math_optimizations)
8456 break;
8457 gcc_fallthrough ();
8458 CASE_FLT_FN (BUILT_IN_ISINF):
8459 CASE_FLT_FN (BUILT_IN_FINITE):
8460 case BUILT_IN_ISFINITE:
8461 case BUILT_IN_ISNORMAL:
8462 target = expand_builtin_interclass_mathfn (exp, target);
8463 if (target)
8464 return target;
8465 break;
8467 CASE_FLT_FN (BUILT_IN_ICEIL):
8468 CASE_FLT_FN (BUILT_IN_LCEIL):
8469 CASE_FLT_FN (BUILT_IN_LLCEIL):
8470 CASE_FLT_FN (BUILT_IN_LFLOOR):
8471 CASE_FLT_FN (BUILT_IN_IFLOOR):
8472 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8473 target = expand_builtin_int_roundingfn (exp, target);
8474 if (target)
8475 return target;
8476 break;
8478 CASE_FLT_FN (BUILT_IN_IRINT):
8479 CASE_FLT_FN (BUILT_IN_LRINT):
8480 CASE_FLT_FN (BUILT_IN_LLRINT):
8481 CASE_FLT_FN (BUILT_IN_IROUND):
8482 CASE_FLT_FN (BUILT_IN_LROUND):
8483 CASE_FLT_FN (BUILT_IN_LLROUND):
8484 target = expand_builtin_int_roundingfn_2 (exp, target);
8485 if (target)
8486 return target;
8487 break;
8489 CASE_FLT_FN (BUILT_IN_POWI):
8490 target = expand_builtin_powi (exp, target);
8491 if (target)
8492 return target;
8493 break;
8495 CASE_FLT_FN (BUILT_IN_CEXPI):
8496 target = expand_builtin_cexpi (exp, target);
8497 gcc_assert (target);
8498 return target;
8500 CASE_FLT_FN (BUILT_IN_SIN):
8501 CASE_FLT_FN (BUILT_IN_COS):
8502 if (! flag_unsafe_math_optimizations)
8503 break;
8504 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8505 if (target)
8506 return target;
8507 break;
8509 CASE_FLT_FN (BUILT_IN_SINCOS):
8510 if (! flag_unsafe_math_optimizations)
8511 break;
8512 target = expand_builtin_sincos (exp);
8513 if (target)
8514 return target;
8515 break;
8517 case BUILT_IN_APPLY_ARGS:
8518 return expand_builtin_apply_args ();
8520 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8521 FUNCTION with a copy of the parameters described by
8522 ARGUMENTS, and ARGSIZE. It returns a block of memory
8523 allocated on the stack into which is stored all the registers
8524 that might possibly be used for returning the result of a
8525 function. ARGUMENTS is the value returned by
8526 __builtin_apply_args. ARGSIZE is the number of bytes of
8527 arguments that must be copied. ??? How should this value be
8528 computed? We'll also need a safe worst case value for varargs
8529 functions. */
8530 case BUILT_IN_APPLY:
8531 if (!validate_arglist (exp, POINTER_TYPE,
8532 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8533 && !validate_arglist (exp, REFERENCE_TYPE,
8534 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8535 return const0_rtx;
8536 else
8538 rtx ops[3];
8540 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8541 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8542 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8544 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8547 /* __builtin_return (RESULT) causes the function to return the
8548 value described by RESULT. RESULT is address of the block of
8549 memory returned by __builtin_apply. */
8550 case BUILT_IN_RETURN:
8551 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8552 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8553 return const0_rtx;
8555 case BUILT_IN_SAVEREGS:
8556 return expand_builtin_saveregs ();
8558 case BUILT_IN_VA_ARG_PACK:
8559 /* All valid uses of __builtin_va_arg_pack () are removed during
8560 inlining. */
8561 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8562 return const0_rtx;
8564 case BUILT_IN_VA_ARG_PACK_LEN:
8565 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8566 inlining. */
8567 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8568 return const0_rtx;
8570 /* Return the address of the first anonymous stack arg. */
8571 case BUILT_IN_NEXT_ARG:
8572 if (fold_builtin_next_arg (exp, false))
8573 return const0_rtx;
8574 return expand_builtin_next_arg ();
8576 case BUILT_IN_CLEAR_CACHE:
8577 target = expand_builtin___clear_cache (exp);
8578 if (target)
8579 return target;
8580 break;
8582 case BUILT_IN_CLASSIFY_TYPE:
8583 return expand_builtin_classify_type (exp);
8585 case BUILT_IN_CONSTANT_P:
8586 return const0_rtx;
8588 case BUILT_IN_FRAME_ADDRESS:
8589 case BUILT_IN_RETURN_ADDRESS:
8590 return expand_builtin_frame_address (fndecl, exp);
8592 /* Returns the address of the area where the structure is returned.
8593 0 otherwise. */
8594 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8595 if (call_expr_nargs (exp) != 0
8596 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8597 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8598 return const0_rtx;
8599 else
8600 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8602 CASE_BUILT_IN_ALLOCA:
8603 target = expand_builtin_alloca (exp);
8604 if (target)
8605 return target;
8606 break;
8608 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8609 return expand_asan_emit_allocas_unpoison (exp);
8611 case BUILT_IN_STACK_SAVE:
8612 return expand_stack_save ();
8614 case BUILT_IN_STACK_RESTORE:
8615 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8616 return const0_rtx;
8618 case BUILT_IN_BSWAP16:
8619 case BUILT_IN_BSWAP32:
8620 case BUILT_IN_BSWAP64:
8621 case BUILT_IN_BSWAP128:
8622 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8623 if (target)
8624 return target;
8625 break;
8627 CASE_INT_FN (BUILT_IN_FFS):
8628 target = expand_builtin_unop (target_mode, exp, target,
8629 subtarget, ffs_optab);
8630 if (target)
8631 return target;
8632 break;
8634 CASE_INT_FN (BUILT_IN_CLZ):
8635 target = expand_builtin_unop (target_mode, exp, target,
8636 subtarget, clz_optab);
8637 if (target)
8638 return target;
8639 break;
8641 CASE_INT_FN (BUILT_IN_CTZ):
8642 target = expand_builtin_unop (target_mode, exp, target,
8643 subtarget, ctz_optab);
8644 if (target)
8645 return target;
8646 break;
8648 CASE_INT_FN (BUILT_IN_CLRSB):
8649 target = expand_builtin_unop (target_mode, exp, target,
8650 subtarget, clrsb_optab);
8651 if (target)
8652 return target;
8653 break;
8655 CASE_INT_FN (BUILT_IN_POPCOUNT):
8656 target = expand_builtin_unop (target_mode, exp, target,
8657 subtarget, popcount_optab);
8658 if (target)
8659 return target;
8660 break;
8662 CASE_INT_FN (BUILT_IN_PARITY):
8663 target = expand_builtin_unop (target_mode, exp, target,
8664 subtarget, parity_optab);
8665 if (target)
8666 return target;
8667 break;
8669 case BUILT_IN_STRLEN:
8670 target = expand_builtin_strlen (exp, target, target_mode);
8671 if (target)
8672 return target;
8673 break;
8675 case BUILT_IN_STRNLEN:
8676 target = expand_builtin_strnlen (exp, target, target_mode);
8677 if (target)
8678 return target;
8679 break;
8681 case BUILT_IN_STRCAT:
8682 target = expand_builtin_strcat (exp);
8683 if (target)
8684 return target;
8685 break;
8687 case BUILT_IN_GETTEXT:
8688 case BUILT_IN_PUTS:
8689 case BUILT_IN_PUTS_UNLOCKED:
8690 case BUILT_IN_STRDUP:
8691 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8692 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8693 break;
8695 case BUILT_IN_INDEX:
8696 case BUILT_IN_RINDEX:
8697 case BUILT_IN_STRCHR:
8698 case BUILT_IN_STRRCHR:
8699 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8700 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8701 break;
8703 case BUILT_IN_FPUTS:
8704 case BUILT_IN_FPUTS_UNLOCKED:
8705 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8706 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8707 break;
8709 case BUILT_IN_STRNDUP:
8710 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8711 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
8712 break;
8714 case BUILT_IN_STRCASECMP:
8715 case BUILT_IN_STRPBRK:
8716 case BUILT_IN_STRSPN:
8717 case BUILT_IN_STRCSPN:
8718 case BUILT_IN_STRSTR:
8719 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8721 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8722 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
8724 break;
8726 case BUILT_IN_STRCPY:
8727 target = expand_builtin_strcpy (exp, target);
8728 if (target)
8729 return target;
8730 break;
8732 case BUILT_IN_STRNCAT:
8733 target = expand_builtin_strncat (exp, target);
8734 if (target)
8735 return target;
8736 break;
8738 case BUILT_IN_STRNCPY:
8739 target = expand_builtin_strncpy (exp, target);
8740 if (target)
8741 return target;
8742 break;
8744 case BUILT_IN_STPCPY:
8745 target = expand_builtin_stpcpy (exp, target, mode);
8746 if (target)
8747 return target;
8748 break;
8750 case BUILT_IN_STPNCPY:
8751 target = expand_builtin_stpncpy (exp, target);
8752 if (target)
8753 return target;
8754 break;
8756 case BUILT_IN_MEMCHR:
8757 target = expand_builtin_memchr (exp, target);
8758 if (target)
8759 return target;
8760 break;
8762 case BUILT_IN_MEMCPY:
8763 target = expand_builtin_memcpy (exp, target);
8764 if (target)
8765 return target;
8766 break;
8768 case BUILT_IN_MEMMOVE:
8769 target = expand_builtin_memmove (exp, target);
8770 if (target)
8771 return target;
8772 break;
8774 case BUILT_IN_MEMPCPY:
8775 target = expand_builtin_mempcpy (exp, target);
8776 if (target)
8777 return target;
8778 break;
8780 case BUILT_IN_MEMSET:
8781 target = expand_builtin_memset (exp, target, mode);
8782 if (target)
8783 return target;
8784 break;
8786 case BUILT_IN_BZERO:
8787 target = expand_builtin_bzero (exp);
8788 if (target)
8789 return target;
8790 break;
8792 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8793 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8794 when changing it to a strcmp call. */
8795 case BUILT_IN_STRCMP_EQ:
8796 target = expand_builtin_memcmp (exp, target, true);
8797 if (target)
8798 return target;
8800 /* Change this call back to a BUILT_IN_STRCMP. */
8801 TREE_OPERAND (exp, 1)
8802 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8804 /* Delete the last parameter. */
8805 unsigned int i;
8806 vec<tree, va_gc> *arg_vec;
8807 vec_alloc (arg_vec, 2);
8808 for (i = 0; i < 2; i++)
8809 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8810 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8811 /* FALLTHROUGH */
8813 case BUILT_IN_STRCMP:
8814 target = expand_builtin_strcmp (exp, target);
8815 if (target)
8816 return target;
8817 break;
8819 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8820 back to a BUILT_IN_STRNCMP. */
8821 case BUILT_IN_STRNCMP_EQ:
8822 target = expand_builtin_memcmp (exp, target, true);
8823 if (target)
8824 return target;
8826 /* Change it back to a BUILT_IN_STRNCMP. */
8827 TREE_OPERAND (exp, 1)
8828 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8829 /* FALLTHROUGH */
8831 case BUILT_IN_STRNCMP:
8832 target = expand_builtin_strncmp (exp, target, mode);
8833 if (target)
8834 return target;
8835 break;
8837 case BUILT_IN_BCMP:
8838 case BUILT_IN_MEMCMP:
8839 case BUILT_IN_MEMCMP_EQ:
8840 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8841 if (target)
8842 return target;
8843 if (fcode == BUILT_IN_MEMCMP_EQ)
8845 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8846 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8848 break;
8850 case BUILT_IN_SETJMP:
8851 /* This should have been lowered to the builtins below. */
8852 gcc_unreachable ();
8854 case BUILT_IN_SETJMP_SETUP:
8855 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8856 and the receiver label. */
8857 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8859 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8860 VOIDmode, EXPAND_NORMAL);
8861 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8862 rtx_insn *label_r = label_rtx (label);
8864 /* This is copied from the handling of non-local gotos. */
8865 expand_builtin_setjmp_setup (buf_addr, label_r);
8866 nonlocal_goto_handler_labels
8867 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8868 nonlocal_goto_handler_labels);
8869 /* ??? Do not let expand_label treat us as such since we would
8870 not want to be both on the list of non-local labels and on
8871 the list of forced labels. */
8872 FORCED_LABEL (label) = 0;
8873 return const0_rtx;
8875 break;
8877 case BUILT_IN_SETJMP_RECEIVER:
8878 /* __builtin_setjmp_receiver is passed the receiver label. */
8879 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8881 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8882 rtx_insn *label_r = label_rtx (label);
8884 expand_builtin_setjmp_receiver (label_r);
8885 return const0_rtx;
8887 break;
8889 /* __builtin_longjmp is passed a pointer to an array of five words.
8890 It's similar to the C library longjmp function but works with
8891 __builtin_setjmp above. */
8892 case BUILT_IN_LONGJMP:
8893 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8895 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8896 VOIDmode, EXPAND_NORMAL);
8897 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8899 if (value != const1_rtx)
8901 error ("%<__builtin_longjmp%> second argument must be 1");
8902 return const0_rtx;
8905 expand_builtin_longjmp (buf_addr, value);
8906 return const0_rtx;
8908 break;
8910 case BUILT_IN_NONLOCAL_GOTO:
8911 target = expand_builtin_nonlocal_goto (exp);
8912 if (target)
8913 return target;
8914 break;
8916 /* This updates the setjmp buffer that is its argument with the value
8917 of the current stack pointer. */
8918 case BUILT_IN_UPDATE_SETJMP_BUF:
8919 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8921 rtx buf_addr
8922 = expand_normal (CALL_EXPR_ARG (exp, 0));
8924 expand_builtin_update_setjmp_buf (buf_addr);
8925 return const0_rtx;
8927 break;
8929 case BUILT_IN_TRAP:
8930 expand_builtin_trap ();
8931 return const0_rtx;
8933 case BUILT_IN_UNREACHABLE:
8934 expand_builtin_unreachable ();
8935 return const0_rtx;
8937 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8938 case BUILT_IN_SIGNBITD32:
8939 case BUILT_IN_SIGNBITD64:
8940 case BUILT_IN_SIGNBITD128:
8941 target = expand_builtin_signbit (exp, target);
8942 if (target)
8943 return target;
8944 break;
8946 /* Various hooks for the DWARF 2 __throw routine. */
8947 case BUILT_IN_UNWIND_INIT:
8948 expand_builtin_unwind_init ();
8949 return const0_rtx;
8950 case BUILT_IN_DWARF_CFA:
8951 return virtual_cfa_rtx;
8952 #ifdef DWARF2_UNWIND_INFO
8953 case BUILT_IN_DWARF_SP_COLUMN:
8954 return expand_builtin_dwarf_sp_column ();
8955 case BUILT_IN_INIT_DWARF_REG_SIZES:
8956 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8957 return const0_rtx;
8958 #endif
8959 case BUILT_IN_FROB_RETURN_ADDR:
8960 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8961 case BUILT_IN_EXTRACT_RETURN_ADDR:
8962 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8963 case BUILT_IN_EH_RETURN:
8964 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8965 CALL_EXPR_ARG (exp, 1));
8966 return const0_rtx;
8967 case BUILT_IN_EH_RETURN_DATA_REGNO:
8968 return expand_builtin_eh_return_data_regno (exp);
8969 case BUILT_IN_EXTEND_POINTER:
8970 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8971 case BUILT_IN_EH_POINTER:
8972 return expand_builtin_eh_pointer (exp);
8973 case BUILT_IN_EH_FILTER:
8974 return expand_builtin_eh_filter (exp);
8975 case BUILT_IN_EH_COPY_VALUES:
8976 return expand_builtin_eh_copy_values (exp);
8978 case BUILT_IN_VA_START:
8979 return expand_builtin_va_start (exp);
8980 case BUILT_IN_VA_END:
8981 return expand_builtin_va_end (exp);
8982 case BUILT_IN_VA_COPY:
8983 return expand_builtin_va_copy (exp);
8984 case BUILT_IN_EXPECT:
8985 return expand_builtin_expect (exp, target);
8986 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8987 return expand_builtin_expect_with_probability (exp, target);
8988 case BUILT_IN_ASSUME_ALIGNED:
8989 return expand_builtin_assume_aligned (exp, target);
8990 case BUILT_IN_PREFETCH:
8991 expand_builtin_prefetch (exp);
8992 return const0_rtx;
8994 case BUILT_IN_INIT_TRAMPOLINE:
8995 return expand_builtin_init_trampoline (exp, true);
8996 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8997 return expand_builtin_init_trampoline (exp, false);
8998 case BUILT_IN_ADJUST_TRAMPOLINE:
8999 return expand_builtin_adjust_trampoline (exp);
9001 case BUILT_IN_INIT_DESCRIPTOR:
9002 return expand_builtin_init_descriptor (exp);
9003 case BUILT_IN_ADJUST_DESCRIPTOR:
9004 return expand_builtin_adjust_descriptor (exp);
9006 case BUILT_IN_FORK:
9007 case BUILT_IN_EXECL:
9008 case BUILT_IN_EXECV:
9009 case BUILT_IN_EXECLP:
9010 case BUILT_IN_EXECLE:
9011 case BUILT_IN_EXECVP:
9012 case BUILT_IN_EXECVE:
9013 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
9014 if (target)
9015 return target;
9016 break;
9018 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
9019 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
9020 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
9021 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
9022 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
9023 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
9024 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
9025 if (target)
9026 return target;
9027 break;
9029 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
9030 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
9031 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
9032 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
9033 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
9034 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
9035 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
9036 if (target)
9037 return target;
9038 break;
9040 case BUILT_IN_SYNC_FETCH_AND_OR_1:
9041 case BUILT_IN_SYNC_FETCH_AND_OR_2:
9042 case BUILT_IN_SYNC_FETCH_AND_OR_4:
9043 case BUILT_IN_SYNC_FETCH_AND_OR_8:
9044 case BUILT_IN_SYNC_FETCH_AND_OR_16:
9045 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
9046 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
9047 if (target)
9048 return target;
9049 break;
9051 case BUILT_IN_SYNC_FETCH_AND_AND_1:
9052 case BUILT_IN_SYNC_FETCH_AND_AND_2:
9053 case BUILT_IN_SYNC_FETCH_AND_AND_4:
9054 case BUILT_IN_SYNC_FETCH_AND_AND_8:
9055 case BUILT_IN_SYNC_FETCH_AND_AND_16:
9056 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
9057 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
9058 if (target)
9059 return target;
9060 break;
9062 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
9063 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
9064 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
9065 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
9066 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
9067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
9068 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
9069 if (target)
9070 return target;
9071 break;
9073 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
9074 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
9075 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
9076 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
9077 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
9078 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
9079 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
9080 if (target)
9081 return target;
9082 break;
9084 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
9085 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
9086 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
9087 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
9088 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
9089 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
9090 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
9091 if (target)
9092 return target;
9093 break;
9095 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
9096 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
9097 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
9098 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
9099 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
9100 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
9101 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
9102 if (target)
9103 return target;
9104 break;
9106 case BUILT_IN_SYNC_OR_AND_FETCH_1:
9107 case BUILT_IN_SYNC_OR_AND_FETCH_2:
9108 case BUILT_IN_SYNC_OR_AND_FETCH_4:
9109 case BUILT_IN_SYNC_OR_AND_FETCH_8:
9110 case BUILT_IN_SYNC_OR_AND_FETCH_16:
9111 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
9112 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
9113 if (target)
9114 return target;
9115 break;
9117 case BUILT_IN_SYNC_AND_AND_FETCH_1:
9118 case BUILT_IN_SYNC_AND_AND_FETCH_2:
9119 case BUILT_IN_SYNC_AND_AND_FETCH_4:
9120 case BUILT_IN_SYNC_AND_AND_FETCH_8:
9121 case BUILT_IN_SYNC_AND_AND_FETCH_16:
9122 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
9123 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
9124 if (target)
9125 return target;
9126 break;
9128 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
9129 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
9130 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
9131 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
9132 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
9133 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
9134 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
9135 if (target)
9136 return target;
9137 break;
9139 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
9140 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
9141 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
9142 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
9143 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
9144 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
9145 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
9146 if (target)
9147 return target;
9148 break;
9150 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
9151 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
9152 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
9153 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
9154 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
9155 if (mode == VOIDmode)
9156 mode = TYPE_MODE (boolean_type_node);
9157 if (!target || !register_operand (target, mode))
9158 target = gen_reg_rtx (mode);
9160 mode = get_builtin_sync_mode
9161 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
9162 target = expand_builtin_compare_and_swap (mode, exp, true, target);
9163 if (target)
9164 return target;
9165 break;
9167 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
9168 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
9169 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
9170 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
9171 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
9172 mode = get_builtin_sync_mode
9173 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
9174 target = expand_builtin_compare_and_swap (mode, exp, false, target);
9175 if (target)
9176 return target;
9177 break;
9179 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
9180 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
9181 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
9182 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
9183 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
9184 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
9185 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
9186 if (target)
9187 return target;
9188 break;
9190 case BUILT_IN_SYNC_LOCK_RELEASE_1:
9191 case BUILT_IN_SYNC_LOCK_RELEASE_2:
9192 case BUILT_IN_SYNC_LOCK_RELEASE_4:
9193 case BUILT_IN_SYNC_LOCK_RELEASE_8:
9194 case BUILT_IN_SYNC_LOCK_RELEASE_16:
9195 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
9196 expand_builtin_sync_lock_release (mode, exp);
9197 return const0_rtx;
9199 case BUILT_IN_SYNC_SYNCHRONIZE:
9200 expand_builtin_sync_synchronize ();
9201 return const0_rtx;
9203 case BUILT_IN_ATOMIC_EXCHANGE_1:
9204 case BUILT_IN_ATOMIC_EXCHANGE_2:
9205 case BUILT_IN_ATOMIC_EXCHANGE_4:
9206 case BUILT_IN_ATOMIC_EXCHANGE_8:
9207 case BUILT_IN_ATOMIC_EXCHANGE_16:
9208 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
9209 target = expand_builtin_atomic_exchange (mode, exp, target);
9210 if (target)
9211 return target;
9212 break;
9214 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
9215 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
9216 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
9217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
9218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
9220 unsigned int nargs, z;
9221 vec<tree, va_gc> *vec;
9223 mode =
9224 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
9225 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
9226 if (target)
9227 return target;
9229 /* If this is turned into an external library call, the weak parameter
9230 must be dropped to match the expected parameter list. */
9231 nargs = call_expr_nargs (exp);
9232 vec_alloc (vec, nargs - 1);
9233 for (z = 0; z < 3; z++)
9234 vec->quick_push (CALL_EXPR_ARG (exp, z));
9235 /* Skip the boolean weak parameter. */
9236 for (z = 4; z < 6; z++)
9237 vec->quick_push (CALL_EXPR_ARG (exp, z));
9238 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
9239 break;
9242 case BUILT_IN_ATOMIC_LOAD_1:
9243 case BUILT_IN_ATOMIC_LOAD_2:
9244 case BUILT_IN_ATOMIC_LOAD_4:
9245 case BUILT_IN_ATOMIC_LOAD_8:
9246 case BUILT_IN_ATOMIC_LOAD_16:
9247 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
9248 target = expand_builtin_atomic_load (mode, exp, target);
9249 if (target)
9250 return target;
9251 break;
9253 case BUILT_IN_ATOMIC_STORE_1:
9254 case BUILT_IN_ATOMIC_STORE_2:
9255 case BUILT_IN_ATOMIC_STORE_4:
9256 case BUILT_IN_ATOMIC_STORE_8:
9257 case BUILT_IN_ATOMIC_STORE_16:
9258 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
9259 target = expand_builtin_atomic_store (mode, exp);
9260 if (target)
9261 return const0_rtx;
9262 break;
9264 case BUILT_IN_ATOMIC_ADD_FETCH_1:
9265 case BUILT_IN_ATOMIC_ADD_FETCH_2:
9266 case BUILT_IN_ATOMIC_ADD_FETCH_4:
9267 case BUILT_IN_ATOMIC_ADD_FETCH_8:
9268 case BUILT_IN_ATOMIC_ADD_FETCH_16:
9270 enum built_in_function lib;
9271 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
9272 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
9273 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
9274 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
9275 ignore, lib);
9276 if (target)
9277 return target;
9278 break;
9280 case BUILT_IN_ATOMIC_SUB_FETCH_1:
9281 case BUILT_IN_ATOMIC_SUB_FETCH_2:
9282 case BUILT_IN_ATOMIC_SUB_FETCH_4:
9283 case BUILT_IN_ATOMIC_SUB_FETCH_8:
9284 case BUILT_IN_ATOMIC_SUB_FETCH_16:
9286 enum built_in_function lib;
9287 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
9288 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
9289 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
9290 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
9291 ignore, lib);
9292 if (target)
9293 return target;
9294 break;
9296 case BUILT_IN_ATOMIC_AND_FETCH_1:
9297 case BUILT_IN_ATOMIC_AND_FETCH_2:
9298 case BUILT_IN_ATOMIC_AND_FETCH_4:
9299 case BUILT_IN_ATOMIC_AND_FETCH_8:
9300 case BUILT_IN_ATOMIC_AND_FETCH_16:
9302 enum built_in_function lib;
9303 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
9304 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
9305 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
9306 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
9307 ignore, lib);
9308 if (target)
9309 return target;
9310 break;
9312 case BUILT_IN_ATOMIC_NAND_FETCH_1:
9313 case BUILT_IN_ATOMIC_NAND_FETCH_2:
9314 case BUILT_IN_ATOMIC_NAND_FETCH_4:
9315 case BUILT_IN_ATOMIC_NAND_FETCH_8:
9316 case BUILT_IN_ATOMIC_NAND_FETCH_16:
9318 enum built_in_function lib;
9319 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
9320 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
9321 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
9322 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
9323 ignore, lib);
9324 if (target)
9325 return target;
9326 break;
9328 case BUILT_IN_ATOMIC_XOR_FETCH_1:
9329 case BUILT_IN_ATOMIC_XOR_FETCH_2:
9330 case BUILT_IN_ATOMIC_XOR_FETCH_4:
9331 case BUILT_IN_ATOMIC_XOR_FETCH_8:
9332 case BUILT_IN_ATOMIC_XOR_FETCH_16:
9334 enum built_in_function lib;
9335 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
9336 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
9337 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
9338 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
9339 ignore, lib);
9340 if (target)
9341 return target;
9342 break;
9344 case BUILT_IN_ATOMIC_OR_FETCH_1:
9345 case BUILT_IN_ATOMIC_OR_FETCH_2:
9346 case BUILT_IN_ATOMIC_OR_FETCH_4:
9347 case BUILT_IN_ATOMIC_OR_FETCH_8:
9348 case BUILT_IN_ATOMIC_OR_FETCH_16:
9350 enum built_in_function lib;
9351 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
9352 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
9353 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
9354 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
9355 ignore, lib);
9356 if (target)
9357 return target;
9358 break;
9360 case BUILT_IN_ATOMIC_FETCH_ADD_1:
9361 case BUILT_IN_ATOMIC_FETCH_ADD_2:
9362 case BUILT_IN_ATOMIC_FETCH_ADD_4:
9363 case BUILT_IN_ATOMIC_FETCH_ADD_8:
9364 case BUILT_IN_ATOMIC_FETCH_ADD_16:
9365 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
9366 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
9367 ignore, BUILT_IN_NONE);
9368 if (target)
9369 return target;
9370 break;
9372 case BUILT_IN_ATOMIC_FETCH_SUB_1:
9373 case BUILT_IN_ATOMIC_FETCH_SUB_2:
9374 case BUILT_IN_ATOMIC_FETCH_SUB_4:
9375 case BUILT_IN_ATOMIC_FETCH_SUB_8:
9376 case BUILT_IN_ATOMIC_FETCH_SUB_16:
9377 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
9378 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
9379 ignore, BUILT_IN_NONE);
9380 if (target)
9381 return target;
9382 break;
9384 case BUILT_IN_ATOMIC_FETCH_AND_1:
9385 case BUILT_IN_ATOMIC_FETCH_AND_2:
9386 case BUILT_IN_ATOMIC_FETCH_AND_4:
9387 case BUILT_IN_ATOMIC_FETCH_AND_8:
9388 case BUILT_IN_ATOMIC_FETCH_AND_16:
9389 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
9390 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
9391 ignore, BUILT_IN_NONE);
9392 if (target)
9393 return target;
9394 break;
9396 case BUILT_IN_ATOMIC_FETCH_NAND_1:
9397 case BUILT_IN_ATOMIC_FETCH_NAND_2:
9398 case BUILT_IN_ATOMIC_FETCH_NAND_4:
9399 case BUILT_IN_ATOMIC_FETCH_NAND_8:
9400 case BUILT_IN_ATOMIC_FETCH_NAND_16:
9401 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
9402 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
9403 ignore, BUILT_IN_NONE);
9404 if (target)
9405 return target;
9406 break;
9408 case BUILT_IN_ATOMIC_FETCH_XOR_1:
9409 case BUILT_IN_ATOMIC_FETCH_XOR_2:
9410 case BUILT_IN_ATOMIC_FETCH_XOR_4:
9411 case BUILT_IN_ATOMIC_FETCH_XOR_8:
9412 case BUILT_IN_ATOMIC_FETCH_XOR_16:
9413 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
9414 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
9415 ignore, BUILT_IN_NONE);
9416 if (target)
9417 return target;
9418 break;
9420 case BUILT_IN_ATOMIC_FETCH_OR_1:
9421 case BUILT_IN_ATOMIC_FETCH_OR_2:
9422 case BUILT_IN_ATOMIC_FETCH_OR_4:
9423 case BUILT_IN_ATOMIC_FETCH_OR_8:
9424 case BUILT_IN_ATOMIC_FETCH_OR_16:
9425 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
9426 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
9427 ignore, BUILT_IN_NONE);
9428 if (target)
9429 return target;
9430 break;
9432 case BUILT_IN_ATOMIC_TEST_AND_SET:
9433 return expand_builtin_atomic_test_and_set (exp, target);
9435 case BUILT_IN_ATOMIC_CLEAR:
9436 return expand_builtin_atomic_clear (exp);
9438 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9439 return expand_builtin_atomic_always_lock_free (exp);
9441 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9442 target = expand_builtin_atomic_is_lock_free (exp);
9443 if (target)
9444 return target;
9445 break;
9447 case BUILT_IN_ATOMIC_THREAD_FENCE:
9448 expand_builtin_atomic_thread_fence (exp);
9449 return const0_rtx;
9451 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
9452 expand_builtin_atomic_signal_fence (exp);
9453 return const0_rtx;
9455 case BUILT_IN_OBJECT_SIZE:
9456 return expand_builtin_object_size (exp);
9458 case BUILT_IN_MEMCPY_CHK:
9459 case BUILT_IN_MEMPCPY_CHK:
9460 case BUILT_IN_MEMMOVE_CHK:
9461 case BUILT_IN_MEMSET_CHK:
9462 target = expand_builtin_memory_chk (exp, target, mode, fcode);
9463 if (target)
9464 return target;
9465 break;
9467 case BUILT_IN_STRCPY_CHK:
9468 case BUILT_IN_STPCPY_CHK:
9469 case BUILT_IN_STRNCPY_CHK:
9470 case BUILT_IN_STPNCPY_CHK:
9471 case BUILT_IN_STRCAT_CHK:
9472 case BUILT_IN_STRNCAT_CHK:
9473 case BUILT_IN_SNPRINTF_CHK:
9474 case BUILT_IN_VSNPRINTF_CHK:
9475 maybe_emit_chk_warning (exp, fcode);
9476 break;
9478 case BUILT_IN_SPRINTF_CHK:
9479 case BUILT_IN_VSPRINTF_CHK:
9480 maybe_emit_sprintf_chk_warning (exp, fcode);
9481 break;
9483 case BUILT_IN_FREE:
9484 if (warn_free_nonheap_object)
9485 maybe_emit_free_warning (exp);
9486 break;
9488 case BUILT_IN_THREAD_POINTER:
9489 return expand_builtin_thread_pointer (exp, target);
9491 case BUILT_IN_SET_THREAD_POINTER:
9492 expand_builtin_set_thread_pointer (exp);
9493 return const0_rtx;
9495 case BUILT_IN_ACC_ON_DEVICE:
9496 /* Do a library call if we failed to expand the builtin when
9497 folding. */
9498 break;
9500 case BUILT_IN_GOACC_PARLEVEL_ID:
9501 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9502 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9504 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9505 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9507 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9508 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9509 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9510 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9511 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9513 return expand_speculation_safe_value (mode, exp, target, ignore);
9515 default: /* Just do a library call for an unknown builtin. */
9516 break;
9519 /* The switch statement above can drop through to cause the function
9520 to be called normally. */
9521 return expand_call (exp, target, ignore);
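/* Editor's illustration, not part of GCC: the __sync_* cases above all
   funnel into expand_builtin_sync_operation, and the boolean "after"
   argument is the only difference between the two families.  A minimal
   caller-side sketch (hypothetical function name):  */

static int
sync_flavors_sketch (int *p)
{
  int oldv = __sync_fetch_and_add (p, 4);  /* after == false: returns the old value */
  int newv = __sync_add_and_fetch (p, 4);  /* after == true: returns the new value */
  return newv - oldv;   /* 8 when no other thread intervenes */
}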
9524 /* Determine whether a tree node represents a call to a built-in
9525 function. If the tree T is a call to a built-in function with
9526 the right number of arguments of the appropriate types, return
9527 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9528 Otherwise the return value is END_BUILTINS. */
9530 enum built_in_function
9531 builtin_mathfn_code (const_tree t)
9533 const_tree fndecl, arg, parmlist;
9534 const_tree argtype, parmtype;
9535 const_call_expr_arg_iterator iter;
9537 if (TREE_CODE (t) != CALL_EXPR)
9538 return END_BUILTINS;
9540 fndecl = get_callee_fndecl (t);
9541 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9542 return END_BUILTINS;
9544 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9545 init_const_call_expr_arg_iterator (t, &iter);
9546 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9548 /* If a function doesn't take a variable number of arguments,
9549 the last element in the list will have type `void'. */
9550 parmtype = TREE_VALUE (parmlist);
9551 if (VOID_TYPE_P (parmtype))
9553 if (more_const_call_expr_args_p (&iter))
9554 return END_BUILTINS;
9555 return DECL_FUNCTION_CODE (fndecl);
9558 if (! more_const_call_expr_args_p (&iter))
9559 return END_BUILTINS;
9561 arg = next_const_call_expr_arg (&iter);
9562 argtype = TREE_TYPE (arg);
9564 if (SCALAR_FLOAT_TYPE_P (parmtype))
9566 if (! SCALAR_FLOAT_TYPE_P (argtype))
9567 return END_BUILTINS;
9569 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9571 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9572 return END_BUILTINS;
9574 else if (POINTER_TYPE_P (parmtype))
9576 if (! POINTER_TYPE_P (argtype))
9577 return END_BUILTINS;
9579 else if (INTEGRAL_TYPE_P (parmtype))
9581 if (! INTEGRAL_TYPE_P (argtype))
9582 return END_BUILTINS;
9584 else
9585 return END_BUILTINS;
9588 /* Variable-length argument list. */
9589 return DECL_FUNCTION_CODE (fndecl);
9592 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9593 evaluate to a constant. */
9595 static tree
9596 fold_builtin_constant_p (tree arg)
9598 /* We return 1 for a numeric type that's known to be a constant
9599 value at compile-time or for an aggregate type that's a
9600 literal constant. */
9601 STRIP_NOPS (arg);
9603 /* If we know this is a constant, return the constant 1. */
9604 if (CONSTANT_CLASS_P (arg)
9605 || (TREE_CODE (arg) == CONSTRUCTOR
9606 && TREE_CONSTANT (arg)))
9607 return integer_one_node;
9608 if (TREE_CODE (arg) == ADDR_EXPR)
9610 tree op = TREE_OPERAND (arg, 0);
9611 if (TREE_CODE (op) == STRING_CST
9612 || (TREE_CODE (op) == ARRAY_REF
9613 && integer_zerop (TREE_OPERAND (op, 1))
9614 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9615 return integer_one_node;
9618 /* If this expression has side effects, show we don't know it to be a
9619 constant. Likewise if it's a pointer or aggregate type, since in
9620 those cases we only want literals; those are only optimized
9621 when generating RTL, not later.
9622 And finally, if we are compiling an initializer, not code, we
9623 need to return a definite result now; there's not going to be any
9624 more optimization done. */
9625 if (TREE_SIDE_EFFECTS (arg)
9626 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9627 || POINTER_TYPE_P (TREE_TYPE (arg))
9628 || cfun == 0
9629 || folding_initializer
9630 || force_folding_builtin_constant_p)
9631 return integer_zero_node;
9633 return NULL_TREE;
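/* Editor's illustration, not part of GCC: source-level behavior of the
   fold above.  Constants and string-literal addresses fold to 1; anything
   with side effects, or of pointer or aggregate type, folds to 0.
   (Hypothetical function name; the argument of __builtin_constant_p is
   never evaluated.)  */

static int
constant_p_sketch (int x)
{
  int a = __builtin_constant_p (42);     /* CONSTANT_CLASS_P: folds to 1 */
  int b = __builtin_constant_p ("abc");  /* ADDR_EXPR of a STRING_CST: 1 */
  int c = __builtin_constant_p (x++);    /* side effects: folds to 0 */
  return a + b + c;
}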
9636 /* Create builtin_expect or builtin_expect_with_probability
9637 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9638 The Fortran FE can also produce builtin_expect with PREDICTOR as its
9639 third argument; builtin_expect_with_probability instead takes a
9640 PROBABILITY value as its third argument. */
9642 static tree
9643 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9644 tree predictor, tree probability)
9646 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9648 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9649 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9650 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9651 ret_type = TREE_TYPE (TREE_TYPE (fn));
9652 pred_type = TREE_VALUE (arg_types);
9653 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9655 pred = fold_convert_loc (loc, pred_type, pred);
9656 expected = fold_convert_loc (loc, expected_type, expected);
9658 if (probability)
9659 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9660 else
9661 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9662 predictor);
9664 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9665 build_int_cst (ret_type, 0));
9668 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9669 NULL_TREE if no simplification is possible. */
9671 tree
9672 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9673 tree arg3)
9675 tree inner, fndecl, inner_arg0;
9676 enum tree_code code;
9678 /* Distribute the expected value over short-circuiting operators.
9679 See through the cast from truthvalue_type_node to long. */
9680 inner_arg0 = arg0;
9681 while (CONVERT_EXPR_P (inner_arg0)
9682 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9683 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9684 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9686 /* If this is a builtin_expect within a builtin_expect keep the
9687 inner one. See through a comparison against a constant. It
9688 might have been added to create a truthvalue. */
9689 inner = inner_arg0;
9691 if (COMPARISON_CLASS_P (inner)
9692 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9693 inner = TREE_OPERAND (inner, 0);
9695 if (TREE_CODE (inner) == CALL_EXPR
9696 && (fndecl = get_callee_fndecl (inner))
9697 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9698 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9699 return arg0;
9701 inner = inner_arg0;
9702 code = TREE_CODE (inner);
9703 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9705 tree op0 = TREE_OPERAND (inner, 0);
9706 tree op1 = TREE_OPERAND (inner, 1);
9707 arg1 = save_expr (arg1);
9709 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9710 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9711 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9713 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9716 /* If the argument isn't invariant then there's nothing else we can do. */
9717 if (!TREE_CONSTANT (inner_arg0))
9718 return NULL_TREE;
9720 /* If we expect that a comparison against the argument will fold to
9721 a constant return the constant. In practice, this means a true
9722 constant or the address of a non-weak symbol. */
9723 inner = inner_arg0;
9724 STRIP_NOPS (inner);
9725 if (TREE_CODE (inner) == ADDR_EXPR)
9727 do
9729 inner = TREE_OPERAND (inner, 0);
9731 while (TREE_CODE (inner) == COMPONENT_REF
9732 || TREE_CODE (inner) == ARRAY_REF);
9733 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9734 return NULL_TREE;
9737 /* Otherwise, ARG0 already has the proper type for the return value. */
9738 return arg0;
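/* Editor's illustration, not part of GCC: the TRUTH_ANDIF/TRUTH_ORIF case
   above distributes the hint over short-circuit operands, so a condition
   written as one call behaves as if each operand carried it
   (hypothetical function name):  */

static int
expect_sketch (long a, long b)
{
  if (__builtin_expect (a && b, 1))
    /* Folded roughly as:
       if (__builtin_expect (a, 1) && __builtin_expect (b, 1))  */
    return 1;
  return 0;
}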
9741 /* Fold a call to __builtin_classify_type with argument ARG. */
9743 static tree
9744 fold_builtin_classify_type (tree arg)
9746 if (arg == 0)
9747 return build_int_cst (integer_type_node, no_type_class);
9749 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9752 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9753 ARG. */
9755 static tree
9756 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9758 if (!validate_arg (arg, POINTER_TYPE))
9759 return NULL_TREE;
9760 else
9762 c_strlen_data lendata = { };
9763 tree len = c_strlen (arg, 0, &lendata);
9765 if (len)
9766 return fold_convert_loc (loc, type, len);
9768 if (!lendata.decl)
9769 c_strlen (arg, 1, &lendata);
9771 if (lendata.decl)
9773 if (EXPR_HAS_LOCATION (arg))
9774 loc = EXPR_LOCATION (arg);
9775 else if (loc == UNKNOWN_LOCATION)
9776 loc = input_location;
9777 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9780 return NULL_TREE;
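/* Editor's illustration, not part of GCC: when c_strlen can measure the
   argument, the fold above replaces the call with an integer constant
   (hypothetical function name):  */

static __SIZE_TYPE__
strlen_sketch (void)
{
  return __builtin_strlen ("abcd");   /* folds to 4; no libcall emitted */
}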
9784 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9786 static tree
9787 fold_builtin_inf (location_t loc, tree type, int warn)
9789 REAL_VALUE_TYPE real;
9791 /* __builtin_inff is intended to be usable to define INFINITY on all
9792 targets. If an infinity is not available, INFINITY expands "to a
9793 positive constant of type float that overflows at translation
9794 time", footnote "In this case, using INFINITY will violate the
9795 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9796 Thus we pedwarn to ensure this constraint violation is
9797 diagnosed. */
9798 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9799 pedwarn (loc, 0, "target format does not support infinity");
9801 real_inf (&real);
9802 return build_real (type, real);
9805 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9806 NULL_TREE if no simplification can be made. */
9808 static tree
9809 fold_builtin_sincos (location_t loc,
9810 tree arg0, tree arg1, tree arg2)
9812 tree type;
9813 tree fndecl, call = NULL_TREE;
9815 if (!validate_arg (arg0, REAL_TYPE)
9816 || !validate_arg (arg1, POINTER_TYPE)
9817 || !validate_arg (arg2, POINTER_TYPE))
9818 return NULL_TREE;
9820 type = TREE_TYPE (arg0);
9822 /* Find the cexpi builtin for this type; sincos canonicalizes to it. */
9823 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9824 if (fn == END_BUILTINS)
9825 return NULL_TREE;
9827 /* Calculate the result when the argument is a constant. */
9828 if (TREE_CODE (arg0) == REAL_CST)
9830 tree complex_type = build_complex_type (type);
9831 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9833 if (!call)
9835 if (!targetm.libc_has_function (function_c99_math_complex, type)
9836 || !builtin_decl_implicit_p (fn))
9837 return NULL_TREE;
9838 fndecl = builtin_decl_explicit (fn);
9839 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9840 call = builtin_save_expr (call);
9843 tree ptype = build_pointer_type (type);
9844 arg1 = fold_convert (ptype, arg1);
9845 arg2 = fold_convert (ptype, arg2);
9846 return build2 (COMPOUND_EXPR, void_type_node,
9847 build2 (MODIFY_EXPR, void_type_node,
9848 build_fold_indirect_ref_loc (loc, arg1),
9849 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9850 build2 (MODIFY_EXPR, void_type_node,
9851 build_fold_indirect_ref_loc (loc, arg2),
9852 fold_build1_loc (loc, REALPART_EXPR, type, call)));
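/* Editor's illustration, not part of GCC: the COMPOUND_EXPR built above,
   written as source.  Both results come from a single cexpi call, whose
   real part is the cosine and whose imaginary part is the sine.
   (Hypothetical function name; assumes __builtin_cexpi is usable
   directly, as the GCC-internal builtin is.)  */

static void
sincos_sketch (double x, double *sinp, double *cosp)
{
  _Complex double t = __builtin_cexpi (x);   /* cos (x) + i * sin (x) */
  *sinp = __imag__ t;
  *cosp = __real__ t;
}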
9855 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9856 Return NULL_TREE if no simplification can be made. */
9858 static tree
9859 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9861 if (!validate_arg (arg1, POINTER_TYPE)
9862 || !validate_arg (arg2, POINTER_TYPE)
9863 || !validate_arg (len, INTEGER_TYPE))
9864 return NULL_TREE;
9866 /* If the LEN parameter is zero, return zero. */
9867 if (integer_zerop (len))
9868 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9869 arg1, arg2);
9871 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9872 if (operand_equal_p (arg1, arg2, 0))
9873 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9875 /* If the LEN parameter is one, return an expression corresponding to
9876 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9877 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9879 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9880 tree cst_uchar_ptr_node
9881 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9883 tree ind1
9884 = fold_convert_loc (loc, integer_type_node,
9885 build1 (INDIRECT_REF, cst_uchar_node,
9886 fold_convert_loc (loc,
9887 cst_uchar_ptr_node,
9888 arg1)));
9889 tree ind2
9890 = fold_convert_loc (loc, integer_type_node,
9891 build1 (INDIRECT_REF, cst_uchar_node,
9892 fold_convert_loc (loc,
9893 cst_uchar_ptr_node,
9894 arg2)));
9895 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9898 return NULL_TREE;
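/* Editor's illustration, not part of GCC: the LEN == 1 fold above turns a
   memcmp call into one byte-wide subtraction (hypothetical function
   name):  */

static int
memcmp1_sketch (const void *p, const void *q)
{
  return __builtin_memcmp (p, q, 1);
  /* folds to:
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q */
}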
9901 /* Fold a call to builtin isascii with argument ARG. */
9903 static tree
9904 fold_builtin_isascii (location_t loc, tree arg)
9906 if (!validate_arg (arg, INTEGER_TYPE))
9907 return NULL_TREE;
9908 else
9910 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9911 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9912 build_int_cst (integer_type_node,
9913 ~ (unsigned HOST_WIDE_INT) 0x7f));
9914 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9915 arg, integer_zero_node);
9919 /* Fold a call to builtin toascii with argument ARG. */
9921 static tree
9922 fold_builtin_toascii (location_t loc, tree arg)
9924 if (!validate_arg (arg, INTEGER_TYPE))
9925 return NULL_TREE;
9927 /* Transform toascii(c) -> (c & 0x7f). */
9928 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9929 build_int_cst (integer_type_node, 0x7f));
9932 /* Fold a call to builtin isdigit with argument ARG. */
9934 static tree
9935 fold_builtin_isdigit (location_t loc, tree arg)
9937 if (!validate_arg (arg, INTEGER_TYPE))
9938 return NULL_TREE;
9939 else
9941 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9942 /* According to the C standard, isdigit is unaffected by locale.
9943 However, it definitely is affected by the target character set. */
9944 unsigned HOST_WIDE_INT target_digit0
9945 = lang_hooks.to_target_charset ('0');
9947 if (target_digit0 == 0)
9948 return NULL_TREE;
9950 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9951 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9952 build_int_cst (unsigned_type_node, target_digit0));
9953 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9954 build_int_cst (unsigned_type_node, 9));
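/* Editor's illustration, not part of GCC: the three character folds above,
   spelled out.  The isdigit form assumes the target's '0' is known
   (hypothetical function name).  */

static int
ctype_folds_sketch (int c)
{
  int a = __builtin_isascii (c);   /* -> ((c & ~0x7f) == 0) */
  int t = __builtin_toascii (c);   /* -> (c & 0x7f) */
  int d = __builtin_isdigit (c);   /* -> ((unsigned) c - '0' <= 9) */
  return a + t + d;
}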
9958 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9960 static tree
9961 fold_builtin_fabs (location_t loc, tree arg, tree type)
9963 if (!validate_arg (arg, REAL_TYPE))
9964 return NULL_TREE;
9966 arg = fold_convert_loc (loc, type, arg);
9967 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9970 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9972 static tree
9973 fold_builtin_abs (location_t loc, tree arg, tree type)
9975 if (!validate_arg (arg, INTEGER_TYPE))
9976 return NULL_TREE;
9978 arg = fold_convert_loc (loc, type, arg);
9979 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9982 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9984 static tree
9985 fold_builtin_carg (location_t loc, tree arg, tree type)
9987 if (validate_arg (arg, COMPLEX_TYPE)
9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9990 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9992 if (atan2_fn)
9994 tree new_arg = builtin_save_expr (arg);
9995 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9996 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9997 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
10001 return NULL_TREE;
10004 /* Fold a call to builtin frexp; we can assume the base is 2. */
10006 static tree
10007 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
10009 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10010 return NULL_TREE;
10012 STRIP_NOPS (arg0);
10014 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10015 return NULL_TREE;
10017 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10019 /* Proceed if a valid pointer type was passed in. */
10020 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
10022 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10023 tree frac, exp;
10025 switch (value->cl)
10027 case rvc_zero:
10028 /* For +-0, return (*exp = 0, +-0). */
10029 exp = integer_zero_node;
10030 frac = arg0;
10031 break;
10032 case rvc_nan:
10033 case rvc_inf:
10034 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
10035 return omit_one_operand_loc (loc, rettype, arg0, arg1);
10036 case rvc_normal:
10038 /* Since the frexp function always expects base 2, and in
10039 GCC normalized significands are already in the range
10040 [0.5, 1.0), we have exactly what frexp wants. */
10041 REAL_VALUE_TYPE frac_rvt = *value;
10042 SET_REAL_EXP (&frac_rvt, 0);
10043 frac = build_real (rettype, frac_rvt);
10044 exp = build_int_cst (integer_type_node, REAL_EXP (value));
10046 break;
10047 default:
10048 gcc_unreachable ();
10051 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
10052 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
10053 TREE_SIDE_EFFECTS (arg1) = 1;
10054 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
10057 return NULL_TREE;
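/* Editor's illustration, not part of GCC: the rvc_normal path above on a
   constant argument.  8.0 is 0.5 * 2**4, so the call collapses to the
   pair (*e = 4, 0.5).  (Hypothetical function name.)  */

static double
frexp_sketch (int *e)
{
  return __builtin_frexp (8.0, e);   /* folds to (*e = 4, 0.5) */
}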
10060 /* Fold a call to builtin modf. */
10062 static tree
10063 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10065 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10066 return NULL_TREE;
10068 STRIP_NOPS (arg0);
10070 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10071 return NULL_TREE;
10073 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10075 /* Proceed if a valid pointer type was passed in. */
10076 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10078 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10079 REAL_VALUE_TYPE trunc, frac;
10081 switch (value->cl)
10083 case rvc_nan:
10084 case rvc_zero:
10085 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10086 trunc = frac = *value;
10087 break;
10088 case rvc_inf:
10089 /* For +-Inf, return (*arg1 = arg0, +-0). */
10090 frac = dconst0;
10091 frac.sign = value->sign;
10092 trunc = *value;
10093 break;
10094 case rvc_normal:
10095 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10096 real_trunc (&trunc, VOIDmode, value);
10097 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10098 /* If the original number was negative and already
10099 integral, then the fractional part is -0.0. */
10100 if (value->sign && frac.cl == rvc_zero)
10101 frac.sign = value->sign;
10102 break;
10105 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10106 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10107 build_real (rettype, trunc));
10108 TREE_SIDE_EFFECTS (arg1) = 1;
10109 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10110 build_real (rettype, frac));
10113 return NULL_TREE;
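/* Editor's illustration, not part of GCC: the constant folds above,
   including the -0.0 fraction for a negative integral input
   (hypothetical function name).  */

static double
modf_sketch (double *ip)
{
  double f = __builtin_modf (2.5, ip);    /* (*ip = 2.0, returns 0.5) */
  double g = __builtin_modf (-3.0, ip);   /* (*ip = -3.0, returns -0.0) */
  return f + g;
}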
10116 /* Given a location LOC, an interclass builtin function decl FNDECL
10117 and its single argument ARG, return a folded expression computing
10118 the same, or NULL_TREE if we either couldn't or didn't want to fold
10119 (the latter happens if there's an RTL instruction available). */
10121 static tree
10122 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
10124 machine_mode mode;
10126 if (!validate_arg (arg, REAL_TYPE))
10127 return NULL_TREE;
10129 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
10130 return NULL_TREE;
10132 mode = TYPE_MODE (TREE_TYPE (arg));
10134 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
10136 /* If there is no optab, try generic code. */
10137 switch (DECL_FUNCTION_CODE (fndecl))
10139 tree result;
10141 CASE_FLT_FN (BUILT_IN_ISINF):
10143 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10144 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10145 tree type = TREE_TYPE (arg);
10146 REAL_VALUE_TYPE r;
10147 char buf[128];
10149 if (is_ibm_extended)
10151 /* NaN and Inf are encoded in the high-order double value
10152 only. The low-order value is not significant. */
10153 type = double_type_node;
10154 mode = DFmode;
10155 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10157 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10158 real_from_string (&r, buf);
10159 result = build_call_expr (isgr_fn, 2,
10160 fold_build1_loc (loc, ABS_EXPR, type, arg),
10161 build_real (type, r));
10162 return result;
10164 CASE_FLT_FN (BUILT_IN_FINITE):
10165 case BUILT_IN_ISFINITE:
10167 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10168 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10169 tree type = TREE_TYPE (arg);
10170 REAL_VALUE_TYPE r;
10171 char buf[128];
10173 if (is_ibm_extended)
10175 /* NaN and Inf are encoded in the high-order double value
10176 only. The low-order value is not significant. */
10177 type = double_type_node;
10178 mode = DFmode;
10179 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10181 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10182 real_from_string (&r, buf);
10183 result = build_call_expr (isle_fn, 2,
10184 fold_build1_loc (loc, ABS_EXPR, type, arg),
10185 build_real (type, r));
10186 /*result = fold_build2_loc (loc, UNGT_EXPR,
10187 TREE_TYPE (TREE_TYPE (fndecl)),
10188 fold_build1_loc (loc, ABS_EXPR, type, arg),
10189 build_real (type, r));
10190 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10191 TREE_TYPE (TREE_TYPE (fndecl)),
10192 result);*/
10193 return result;
10195 case BUILT_IN_ISNORMAL:
10197 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10198 islessequal(fabs(x),DBL_MAX). */
10199 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10200 tree type = TREE_TYPE (arg);
10201 tree orig_arg, max_exp, min_exp;
10202 machine_mode orig_mode = mode;
10203 REAL_VALUE_TYPE rmax, rmin;
10204 char buf[128];
10206 orig_arg = arg = builtin_save_expr (arg);
10207 if (is_ibm_extended)
10209 /* Use double to test the normal range of IBM extended
10210 precision. Emin for IBM extended precision is
10211 different to emin for IEEE double, being 53 higher
10212 since the low double exponent is at least 53 lower
10213 than the high double exponent. */
10214 type = double_type_node;
10215 mode = DFmode;
10216 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10218 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
10220 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10221 real_from_string (&rmax, buf);
10222 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
10223 real_from_string (&rmin, buf);
10224 max_exp = build_real (type, rmax);
10225 min_exp = build_real (type, rmin);
10227 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
10228 if (is_ibm_extended)
10230 /* Testing the high end of the range is done just using
10231 the high double, using the same test as isfinite().
10232 For the subnormal end of the range we first test the
10233 high double, then if its magnitude is equal to the
10234 limit of 0x1p-969, we test whether the low double is
10235 non-zero and opposite sign to the high double. */
10236 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
10237 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10238 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
10239 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
10240 arg, min_exp);
10241 tree as_complex = build1 (VIEW_CONVERT_EXPR,
10242 complex_double_type_node, orig_arg);
10243 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
10244 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
10245 tree zero = build_real (type, dconst0);
10246 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
10247 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
10248 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
10249 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
10250 fold_build3 (COND_EXPR,
10251 integer_type_node,
10252 hilt, logt, lolt));
10253 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
10254 eq_min, ok_lo);
10255 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
10256 gt_min, eq_min);
10258 else
10260 tree const isge_fn
10261 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10262 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
10264 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
10265 max_exp, min_exp);
10266 return result;
10268 default:
10269 break;
10272 return NULL_TREE;
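/* Editor's illustration, not part of GCC: the generic range tests built
   above for a plain double argument (no IBM extended format involved;
   hypothetical function name).  */

static int
interclass_sketch (double x)
{
  int inf_p = __builtin_isgreater (__builtin_fabs (x), __DBL_MAX__);      /* isinf */
  int fin_p = __builtin_islessequal (__builtin_fabs (x), __DBL_MAX__);    /* isfinite */
  int nrm_p = __builtin_isgreaterequal (__builtin_fabs (x), __DBL_MIN__)
	      & fin_p;                                                    /* isnormal */
  return inf_p + fin_p + nrm_p;
}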
10275 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10276 ARG is the argument for the call. */
10278 static tree
10279 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10281 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10283 if (!validate_arg (arg, REAL_TYPE))
10284 return NULL_TREE;
10286 switch (builtin_index)
10288 case BUILT_IN_ISINF:
10289 if (!HONOR_INFINITIES (arg))
10290 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10292 return NULL_TREE;
10294 case BUILT_IN_ISINF_SIGN:
10296 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10297 /* In a boolean context, GCC will fold the inner COND_EXPR to
10298 1. So e.g. "if (isinf_sign(x))" would be folded to just
10299 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10300 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
10301 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10302 tree tmp = NULL_TREE;
10304 arg = builtin_save_expr (arg);
10306 if (signbit_fn && isinf_fn)
10308 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10309 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10311 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10312 signbit_call, integer_zero_node);
10313 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10314 isinf_call, integer_zero_node);
10316 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10317 integer_minus_one_node, integer_one_node);
10318 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10319 isinf_call, tmp,
10320 integer_zero_node);
10323 return tmp;
10326 case BUILT_IN_ISFINITE:
10327 if (!HONOR_NANS (arg)
10328 && !HONOR_INFINITIES (arg))
10329 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10331 return NULL_TREE;
10333 case BUILT_IN_ISNAN:
10334 if (!HONOR_NANS (arg))
10335 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10338 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
10339 if (is_ibm_extended)
10341 /* NaN and Inf are encoded in the high-order double value
10342 only. The low-order value is not significant. */
10343 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
10346 arg = builtin_save_expr (arg);
10347 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10349 default:
10350 gcc_unreachable ();
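/* Editor's illustration, not part of GCC: the ISNAN case above is the
   classic unordered self-comparison; when NaNs are not honored (e.g.
   -ffinite-math-only) it folds to the constant 0 instead.
   (Hypothetical function name.)  */

static int
isnan_sketch (double x)
{
  return __builtin_isnan (x);   /* -> unordered (x, x) self-test */
}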
10354 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10355 This builtin will generate code to return the appropriate floating
10356 point classification depending on the value of the floating point
10357 number passed in. The possible return values must be supplied as
10358 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10359 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10360 one floating-point argument, which is "type generic". */
10362 static tree
10363 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
10365 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10366 arg, type, res, tmp;
10367 machine_mode mode;
10368 REAL_VALUE_TYPE r;
10369 char buf[128];
10371 /* Verify the required arguments in the original call. */
10372 if (nargs != 6
10373 || !validate_arg (args[0], INTEGER_TYPE)
10374 || !validate_arg (args[1], INTEGER_TYPE)
10375 || !validate_arg (args[2], INTEGER_TYPE)
10376 || !validate_arg (args[3], INTEGER_TYPE)
10377 || !validate_arg (args[4], INTEGER_TYPE)
10378 || !validate_arg (args[5], REAL_TYPE))
10379 return NULL_TREE;
10381 fp_nan = args[0];
10382 fp_infinite = args[1];
10383 fp_normal = args[2];
10384 fp_subnormal = args[3];
10385 fp_zero = args[4];
10386 arg = args[5];
10387 type = TREE_TYPE (arg);
10388 mode = TYPE_MODE (type);
10389 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10391 /* fpclassify(x) ->
10392 isnan(x) ? FP_NAN :
10393 (fabs(x) == Inf ? FP_INFINITE :
10394 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10395 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10397 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10398 build_real (type, dconst0));
10399 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10400 tmp, fp_zero, fp_subnormal);
10402 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10403 real_from_string (&r, buf);
10404 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10405 arg, build_real (type, r));
10406 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10408 if (HONOR_INFINITIES (mode))
10410 real_inf (&r);
10411 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10412 build_real (type, r));
10413 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10414 fp_infinite, res);
10417 if (HONOR_NANS (mode))
10419 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10420 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10423 return res;
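/* Editor's illustration, not part of GCC: a call matching the decision
   chain above.  The first five arguments are the caller's classification
   codes, in the order FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL,
   FP_ZERO; the values below are arbitrary (hypothetical function name).  */

static int
fpclassify_sketch (double x)
{
  return __builtin_fpclassify (0, 1, 2, 3, 4, x);
}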
10426 /* Fold a call to an unordered comparison function such as
10427 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10428 being called and ARG0 and ARG1 are the arguments for the call.
10429 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10430 the opposite of the desired result. UNORDERED_CODE is used
10431 for modes that can hold NaNs and ORDERED_CODE is used for
10432 the rest. */
10434 static tree
10435 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10436 enum tree_code unordered_code,
10437 enum tree_code ordered_code)
10439 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10440 enum tree_code code;
10441 tree type0, type1;
10442 enum tree_code code0, code1;
10443 tree cmp_type = NULL_TREE;
10445 type0 = TREE_TYPE (arg0);
10446 type1 = TREE_TYPE (arg1);
10448 code0 = TREE_CODE (type0);
10449 code1 = TREE_CODE (type1);
10451 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10452 /* Choose the wider of two real types. */
10453 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10454 ? type0 : type1;
10455 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10456 cmp_type = type0;
10457 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10458 cmp_type = type1;
10460 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10461 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10463 if (unordered_code == UNORDERED_EXPR)
10465 if (!HONOR_NANS (arg0))
10466 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10467 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10470 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
10471 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10472 fold_build2_loc (loc, code, type, arg0, arg1));
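/* Editor's illustration, not part of GCC: the inversion above means that
   for a NaN-honoring type, isgreater (x, y) becomes !(x UNLE y), which is
   false whenever either operand is a NaN and, being a quiet comparison,
   raises no "invalid" exception (hypothetical function name).  */

static int
isgreater_sketch (double x, double y)
{
  return __builtin_isgreater (x, y);
}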
10475 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10476 arithmetic if it can never overflow, or into internal functions that
10477 return both the result of the arithmetic and an overflowed boolean flag
10478 in a complex integer result, or into some other check for overflow.
10479 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10480 checking part of that. */
10482 static tree
10483 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10484 tree arg0, tree arg1, tree arg2)
10486 enum internal_fn ifn = IFN_LAST;
10487 /* The code of the expression corresponding to the built-in. */
10488 enum tree_code opcode = ERROR_MARK;
10489 bool ovf_only = false;
10491 switch (fcode)
10493 case BUILT_IN_ADD_OVERFLOW_P:
10494 ovf_only = true;
10495 /* FALLTHRU */
10496 case BUILT_IN_ADD_OVERFLOW:
10497 case BUILT_IN_SADD_OVERFLOW:
10498 case BUILT_IN_SADDL_OVERFLOW:
10499 case BUILT_IN_SADDLL_OVERFLOW:
10500 case BUILT_IN_UADD_OVERFLOW:
10501 case BUILT_IN_UADDL_OVERFLOW:
10502 case BUILT_IN_UADDLL_OVERFLOW:
10503 opcode = PLUS_EXPR;
10504 ifn = IFN_ADD_OVERFLOW;
10505 break;
10506 case BUILT_IN_SUB_OVERFLOW_P:
10507 ovf_only = true;
10508 /* FALLTHRU */
10509 case BUILT_IN_SUB_OVERFLOW:
10510 case BUILT_IN_SSUB_OVERFLOW:
10511 case BUILT_IN_SSUBL_OVERFLOW:
10512 case BUILT_IN_SSUBLL_OVERFLOW:
10513 case BUILT_IN_USUB_OVERFLOW:
10514 case BUILT_IN_USUBL_OVERFLOW:
10515 case BUILT_IN_USUBLL_OVERFLOW:
10516 opcode = MINUS_EXPR;
10517 ifn = IFN_SUB_OVERFLOW;
10518 break;
10519 case BUILT_IN_MUL_OVERFLOW_P:
10520 ovf_only = true;
10521 /* FALLTHRU */
10522 case BUILT_IN_MUL_OVERFLOW:
10523 case BUILT_IN_SMUL_OVERFLOW:
10524 case BUILT_IN_SMULL_OVERFLOW:
10525 case BUILT_IN_SMULLL_OVERFLOW:
10526 case BUILT_IN_UMUL_OVERFLOW:
10527 case BUILT_IN_UMULL_OVERFLOW:
10528 case BUILT_IN_UMULLL_OVERFLOW:
10529 opcode = MULT_EXPR;
10530 ifn = IFN_MUL_OVERFLOW;
10531 break;
10532 default:
10533 gcc_unreachable ();
10536 /* For the "generic" overloads, the first two arguments can have different
10537 types and the last argument determines the target type to use to check
10538 for overflow. The arguments of the other overloads all have the same
10539 type. */
10540 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10542 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10543 arguments are constant, attempt to fold the built-in call into a constant
10544 expression indicating whether or not it detected an overflow. */
10545 if (ovf_only
10546 && TREE_CODE (arg0) == INTEGER_CST
10547 && TREE_CODE (arg1) == INTEGER_CST)
10548 /* Perform the computation in the target type and check for overflow. */
10549 return omit_one_operand_loc (loc, boolean_type_node,
10550 arith_overflowed_p (opcode, type, arg0, arg1)
10551 ? boolean_true_node : boolean_false_node,
10552 arg2);
10554 tree intres, ovfres;
10555 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10557 intres = fold_binary_loc (loc, opcode, type,
10558 fold_convert_loc (loc, type, arg0),
10559 fold_convert_loc (loc, type, arg1));
10560 if (TREE_OVERFLOW (intres))
10561 intres = drop_tree_overflow (intres);
10562 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10563 ? boolean_true_node : boolean_false_node);
10565 else
10567 tree ctype = build_complex_type (type);
10568 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10569 arg0, arg1);
10570 tree tgt = save_expr (call);
10571 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10572 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10573 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10576 if (ovf_only)
10577 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10579 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10580 tree store
10581 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10582 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
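/* Editor's illustration, not part of GCC: the two result shapes folded
   above.  The _p form with constant operands becomes a constant; the
   plain form stores the wrapped result through its pointer argument and
   yields the overflow flag (hypothetical function name).  */

static int
overflow_sketch (int a, int b, int *res)
{
  int k = __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0);  /* folds to 1 */
  return __builtin_add_overflow (a, b, res) + k;
}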
10585 /* Fold a call to __builtin_FILE to a constant string. */
10587 static inline tree
10588 fold_builtin_FILE (location_t loc)
10590 if (const char *fname = LOCATION_FILE (loc))
10592 /* The documentation says this builtin is equivalent to the preprocessor
10593 __FILE__ macro, so it appears appropriate to use the same file prefix
10594 mappings. */
10595 fname = remap_macro_filename (fname);
10596 return build_string_literal (strlen (fname) + 1, fname);
10599 return build_string_literal (1, "");
10602 /* Fold a call to __builtin_FUNCTION to a constant string. */
10604 static inline tree
10605 fold_builtin_FUNCTION ()
10607 const char *name = "";
10609 if (current_function_decl)
10610 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10612 return build_string_literal (strlen (name) + 1, name);
10615 /* Fold a call to __builtin_LINE to an integer constant. */
10617 static inline tree
10618 fold_builtin_LINE (location_t loc, tree type)
10620 return build_int_cst (type, LOCATION_LINE (loc));
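/* Editor's illustration, not part of GCC: unlike the __FILE__, __LINE__
   and __func__ forms, these builtins fold at the location of the call
   expression, so a C++ default argument using them reports the caller's
   location.  In plain C they still fold to constants
   (hypothetical function name):  */

static const char *
srcloc_sketch (void)
{
  return __builtin_FILE ();   /* folds to this file's (possibly remapped) name */
}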
10623 /* Fold a call to built-in function FNDECL with 0 arguments.
10624 This function returns NULL_TREE if no simplification was possible. */
10626 static tree
10627 fold_builtin_0 (location_t loc, tree fndecl)
10629 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10630 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10631 switch (fcode)
10633 case BUILT_IN_FILE:
10634 return fold_builtin_FILE (loc);
10636 case BUILT_IN_FUNCTION:
10637 return fold_builtin_FUNCTION ();
10639 case BUILT_IN_LINE:
10640 return fold_builtin_LINE (loc, type);
10642 CASE_FLT_FN (BUILT_IN_INF):
10643 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10644 case BUILT_IN_INFD32:
10645 case BUILT_IN_INFD64:
10646 case BUILT_IN_INFD128:
10647 return fold_builtin_inf (loc, type, true);
10649 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10650 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10651 return fold_builtin_inf (loc, type, false);
10653 case BUILT_IN_CLASSIFY_TYPE:
10654 return fold_builtin_classify_type (NULL_TREE);
10656 default:
10657 break;
10659 return NULL_TREE;
10662 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10663 This function returns NULL_TREE if no simplification was possible. */
10665 static tree
10666 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10668 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10669 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10671 if (TREE_CODE (arg0) == ERROR_MARK)
10672 return NULL_TREE;
10674 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10675 return ret;
10677 switch (fcode)
10679 case BUILT_IN_CONSTANT_P:
10681 tree val = fold_builtin_constant_p (arg0);
10683 /* Gimplification will pull the CALL_EXPR for the builtin out of
10684 an if condition. When not optimizing, we'll not CSE it back.
10685 To avoid regressions in the form of link errors, return false now. */
10686 if (!val && !optimize)
10687 val = integer_zero_node;
10689 return val;
10692 case BUILT_IN_CLASSIFY_TYPE:
10693 return fold_builtin_classify_type (arg0);
10695 case BUILT_IN_STRLEN:
10696 return fold_builtin_strlen (loc, expr, type, arg0);
10698 CASE_FLT_FN (BUILT_IN_FABS):
10699 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10700 case BUILT_IN_FABSD32:
10701 case BUILT_IN_FABSD64:
10702 case BUILT_IN_FABSD128:
10703 return fold_builtin_fabs (loc, arg0, type);
10705 case BUILT_IN_ABS:
10706 case BUILT_IN_LABS:
10707 case BUILT_IN_LLABS:
10708 case BUILT_IN_IMAXABS:
10709 return fold_builtin_abs (loc, arg0, type);
10711 CASE_FLT_FN (BUILT_IN_CONJ):
10712 if (validate_arg (arg0, COMPLEX_TYPE)
10713 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10714 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10715 break;
10717 CASE_FLT_FN (BUILT_IN_CREAL):
10718 if (validate_arg (arg0, COMPLEX_TYPE)
10719 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10720 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10721 break;
10723 CASE_FLT_FN (BUILT_IN_CIMAG):
10724 if (validate_arg (arg0, COMPLEX_TYPE)
10725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10726 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10727 break;
10729 CASE_FLT_FN (BUILT_IN_CARG):
10730 return fold_builtin_carg (loc, arg0, type);
10732 case BUILT_IN_ISASCII:
10733 return fold_builtin_isascii (loc, arg0);
10735 case BUILT_IN_TOASCII:
10736 return fold_builtin_toascii (loc, arg0);
10738 case BUILT_IN_ISDIGIT:
10739 return fold_builtin_isdigit (loc, arg0);
10741 CASE_FLT_FN (BUILT_IN_FINITE):
10742 case BUILT_IN_FINITED32:
10743 case BUILT_IN_FINITED64:
10744 case BUILT_IN_FINITED128:
10745 case BUILT_IN_ISFINITE:
10747 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10748 if (ret)
10749 return ret;
10750 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10753 CASE_FLT_FN (BUILT_IN_ISINF):
10754 case BUILT_IN_ISINFD32:
10755 case BUILT_IN_ISINFD64:
10756 case BUILT_IN_ISINFD128:
10758 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10759 if (ret)
10760 return ret;
10761 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10764 case BUILT_IN_ISNORMAL:
10765 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10767 case BUILT_IN_ISINF_SIGN:
10768 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10770 CASE_FLT_FN (BUILT_IN_ISNAN):
10771 case BUILT_IN_ISNAND32:
10772 case BUILT_IN_ISNAND64:
10773 case BUILT_IN_ISNAND128:
10774 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10776 case BUILT_IN_FREE:
10777 if (integer_zerop (arg0))
10778 return build_empty_stmt (loc);
10779 break;
10781 default:
10782 break;
10785 return NULL_TREE;
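
/* Illustrative sketch (not part of builtins.c): why the
   BUILT_IN_CONSTANT_P case above folds to 0 at -O0.  The guarded call
   below must disappear even without optimization, or a reference to the
   (hypothetical) helper would survive to link time.  */

extern void compile_time_only (int);	/* hypothetical, never defined */

static inline void
maybe_fast_path (int x)
{
  if (__builtin_constant_p (x))		/* folds to 0 when not optimizing */
    compile_time_only (x);
}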
10789 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10790 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10791 if no simplification was possible. */
10793 static tree
10794 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10796 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10797 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10799 if (TREE_CODE (arg0) == ERROR_MARK
10800 || TREE_CODE (arg1) == ERROR_MARK)
10801 return NULL_TREE;
10803 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10804 return ret;
10806 switch (fcode)
10808 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10809 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10810 if (validate_arg (arg0, REAL_TYPE)
10811 && validate_arg (arg1, POINTER_TYPE))
10812 return do_mpfr_lgamma_r (arg0, arg1, type);
10813 break;
10815 CASE_FLT_FN (BUILT_IN_FREXP):
10816 return fold_builtin_frexp (loc, arg0, arg1, type);
10818 CASE_FLT_FN (BUILT_IN_MODF):
10819 return fold_builtin_modf (loc, arg0, arg1, type);
10821 case BUILT_IN_STRSPN:
10822 return fold_builtin_strspn (loc, expr, arg0, arg1);
10824 case BUILT_IN_STRCSPN:
10825 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10827 case BUILT_IN_STRPBRK:
10828 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10830 case BUILT_IN_EXPECT:
10831 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10833 case BUILT_IN_ISGREATER:
10834 return fold_builtin_unordered_cmp (loc, fndecl,
10835 arg0, arg1, UNLE_EXPR, LE_EXPR);
10836 case BUILT_IN_ISGREATEREQUAL:
10837 return fold_builtin_unordered_cmp (loc, fndecl,
10838 arg0, arg1, UNLT_EXPR, LT_EXPR);
10839 case BUILT_IN_ISLESS:
10840 return fold_builtin_unordered_cmp (loc, fndecl,
10841 arg0, arg1, UNGE_EXPR, GE_EXPR);
10842 case BUILT_IN_ISLESSEQUAL:
10843 return fold_builtin_unordered_cmp (loc, fndecl,
10844 arg0, arg1, UNGT_EXPR, GT_EXPR);
10845 case BUILT_IN_ISLESSGREATER:
10846 return fold_builtin_unordered_cmp (loc, fndecl,
10847 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10848 case BUILT_IN_ISUNORDERED:
10849 return fold_builtin_unordered_cmp (loc, fndecl,
10850 arg0, arg1, UNORDERED_EXPR,
10851 NOP_EXPR);
10853 /* We do the folding for va_start in the expander. */
10854 case BUILT_IN_VA_START:
10855 break;
10857 case BUILT_IN_OBJECT_SIZE:
10858 return fold_builtin_object_size (arg0, arg1);
10860 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10861 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10863 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10864 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10866 default:
10867 break;
10869 return NULL_TREE;
10872 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10873 and ARG2.
10874 This function returns NULL_TREE if no simplification was possible. */
10876 static tree
10877 fold_builtin_3 (location_t loc, tree fndecl,
10878 tree arg0, tree arg1, tree arg2)
10880 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10881 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10883 if (TREE_CODE (arg0) == ERROR_MARK
10884 || TREE_CODE (arg1) == ERROR_MARK
10885 || TREE_CODE (arg2) == ERROR_MARK)
10886 return NULL_TREE;
10888 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10889 arg0, arg1, arg2))
10890 return ret;
10892 switch (fcode)
10895 CASE_FLT_FN (BUILT_IN_SINCOS):
10896 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10898 CASE_FLT_FN (BUILT_IN_REMQUO):
10899 if (validate_arg (arg0, REAL_TYPE)
10900 && validate_arg (arg1, REAL_TYPE)
10901 && validate_arg (arg2, POINTER_TYPE))
10902 return do_mpfr_remquo (arg0, arg1, arg2);
10903 break;
10905 case BUILT_IN_MEMCMP:
10906 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10908 case BUILT_IN_EXPECT:
10909 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10911 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10912 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10914 case BUILT_IN_ADD_OVERFLOW:
10915 case BUILT_IN_SUB_OVERFLOW:
10916 case BUILT_IN_MUL_OVERFLOW:
10917 case BUILT_IN_ADD_OVERFLOW_P:
10918 case BUILT_IN_SUB_OVERFLOW_P:
10919 case BUILT_IN_MUL_OVERFLOW_P:
10920 case BUILT_IN_SADD_OVERFLOW:
10921 case BUILT_IN_SADDL_OVERFLOW:
10922 case BUILT_IN_SADDLL_OVERFLOW:
10923 case BUILT_IN_SSUB_OVERFLOW:
10924 case BUILT_IN_SSUBL_OVERFLOW:
10925 case BUILT_IN_SSUBLL_OVERFLOW:
10926 case BUILT_IN_SMUL_OVERFLOW:
10927 case BUILT_IN_SMULL_OVERFLOW:
10928 case BUILT_IN_SMULLL_OVERFLOW:
10929 case BUILT_IN_UADD_OVERFLOW:
10930 case BUILT_IN_UADDL_OVERFLOW:
10931 case BUILT_IN_UADDLL_OVERFLOW:
10932 case BUILT_IN_USUB_OVERFLOW:
10933 case BUILT_IN_USUBL_OVERFLOW:
10934 case BUILT_IN_USUBLL_OVERFLOW:
10935 case BUILT_IN_UMUL_OVERFLOW:
10936 case BUILT_IN_UMULL_OVERFLOW:
10937 case BUILT_IN_UMULLL_OVERFLOW:
10938 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10940 default:
10941 break;
10943 return NULL_TREE;
10946 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10947 ARGS is an array of NARGS arguments. IGNORE is true if the result
10948 of the function call is ignored. This function returns NULL_TREE
10949 if no simplification was possible. */
10951 static tree
10952 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10953 int nargs, bool)
10955 tree ret = NULL_TREE;
10957 switch (nargs)
10959 case 0:
10960 ret = fold_builtin_0 (loc, fndecl);
10961 break;
10962 case 1:
10963 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10964 break;
10965 case 2:
10966 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10967 break;
10968 case 3:
10969 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10970 break;
10971 default:
10972 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10973 break;
10975 if (ret)
10977 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10978 SET_EXPR_LOCATION (ret, loc);
10979 return ret;
10981 return NULL_TREE;
10984 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10985 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10986 of arguments in ARGS to be omitted. OLDNARGS is the number of
10987 elements in ARGS. */
10989 static tree
10990 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10991 int skip, tree fndecl, int n, va_list newargs)
10993 int nargs = oldnargs - skip + n;
10994 tree *buffer;
10996 if (n > 0)
10998 int i, j;
11000 buffer = XALLOCAVEC (tree, nargs);
11001 for (i = 0; i < n; i++)
11002 buffer[i] = va_arg (newargs, tree);
11003 for (j = skip; j < oldnargs; j++, i++)
11004 buffer[i] = args[j];
11006 else
11007 buffer = args + skip;
11009 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11012 /* Return true if FNDECL shouldn't be folded right now.
11013 If a built-in function has an always_inline attribute wrapper,
11014 defer folding it until after always_inline functions have
11015 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11016 might not be performed. */
11018 bool
11019 avoid_folding_inline_builtin (tree fndecl)
11021 return (DECL_DECLARED_INLINE_P (fndecl)
11022 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11023 && cfun
11024 && !cfun->always_inline_functions_inlined
11025 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
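
/* Illustrative sketch (not part of builtins.c): the kind of
   always_inline wrapper the check above protects, modeled on glibc's
   fortified headers.  Calls to this 'strcpy' must not be folded as the
   plain builtin before the wrapper is inlined, or the object-size check
   would be bypassed.  */

extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) char *
strcpy (char *__dest, const char *__src)
{
  return __builtin___strcpy_chk (__dest, __src,
				 __builtin_object_size (__dest, 1));
}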
11028 /* A wrapper function for builtin folding that prevents warnings for
11029 "statement without effect" and the like, caused by removing the
11030 call node earlier than the warning is generated. */
11032 tree
11033 fold_call_expr (location_t loc, tree exp, bool ignore)
11035 tree ret = NULL_TREE;
11036 tree fndecl = get_callee_fndecl (exp);
11037 if (fndecl && fndecl_built_in_p (fndecl)
11038 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11039 yet. Defer folding until we see all the arguments
11040 (after inlining). */
11041 && !CALL_EXPR_VA_ARG_PACK (exp))
11043 int nargs = call_expr_nargs (exp);
11045 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11046 instead last argument is __builtin_va_arg_pack (). Defer folding
11047 even in that case, until arguments are finalized. */
11048 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11050 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11051 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11052 return NULL_TREE;
11055 if (avoid_folding_inline_builtin (fndecl))
11056 return NULL_TREE;
11058 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11059 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11060 CALL_EXPR_ARGP (exp), ignore);
11061 else
11063 tree *args = CALL_EXPR_ARGP (exp);
11064 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11065 if (ret)
11066 return ret;
11069 return NULL_TREE;
11072 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11073 N arguments are passed in the array ARGARRAY. Return a folded
11074 expression or NULL_TREE if no simplification was possible. */
11076 tree
11077 fold_builtin_call_array (location_t loc, tree,
11078 tree fn,
11079 int n,
11080 tree *argarray)
11082 if (TREE_CODE (fn) != ADDR_EXPR)
11083 return NULL_TREE;
11085 tree fndecl = TREE_OPERAND (fn, 0);
11086 if (TREE_CODE (fndecl) == FUNCTION_DECL
11087 && fndecl_built_in_p (fndecl))
11089 /* If last argument is __builtin_va_arg_pack (), arguments to this
11090 function are not finalized yet. Defer folding until they are. */
11091 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11093 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11094 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11095 return NULL_TREE;
11097 if (avoid_folding_inline_builtin (fndecl))
11098 return NULL_TREE;
11099 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11100 return targetm.fold_builtin (fndecl, n, argarray, false);
11101 else
11102 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11105 return NULL_TREE;
11108 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11109 along with N new arguments specified as the "..." parameters. SKIP
11110 is the number of arguments in EXP to be omitted. This function is used
11111 to do varargs-to-varargs transformations. */
11113 static tree
11114 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11116 va_list ap;
11117 tree t;
11119 va_start (ap, n);
11120 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11121 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11122 va_end (ap);
11124 return t;
11127 /* Validate a single argument ARG against a tree code CODE representing
11128 a type. Return true when argument is valid. */
11130 static bool
11131 validate_arg (const_tree arg, enum tree_code code)
11133 if (!arg)
11134 return false;
11135 else if (code == POINTER_TYPE)
11136 return POINTER_TYPE_P (TREE_TYPE (arg));
11137 else if (code == INTEGER_TYPE)
11138 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11139 return code == TREE_CODE (TREE_TYPE (arg));
11142 /* This function validates the types of a function call argument list
11143 against a specified list of tree_codes. If the last specifier is a 0,
11144 that represents an ellipsis; otherwise the last specifier must be a
11145 VOID_TYPE.
11147 This is the GIMPLE version of validate_arglist. Eventually we want to
11148 completely convert builtins.c to work from GIMPLEs and the tree based
11149 validate_arglist will then be removed. */
11151 bool
11152 validate_gimple_arglist (const gcall *call, ...)
11154 enum tree_code code;
11155 bool res = 0;
11156 va_list ap;
11157 const_tree arg;
11158 size_t i;
11160 va_start (ap, call);
11161 i = 0;
11163 do
11165 code = (enum tree_code) va_arg (ap, int);
11166 switch (code)
11168 case 0:
11169 /* This signifies an ellipsis; any further arguments are all OK. */
11170 res = true;
11171 goto end;
11172 case VOID_TYPE:
11173 /* This signifies an endlink, if no arguments remain, return
11174 true, otherwise return false. */
11175 res = (i == gimple_call_num_args (call));
11176 goto end;
11177 default:
11178 /* If no parameters remain or the parameter's code does not
11179 match the specified code, return false. Otherwise continue
11180 checking any remaining arguments. */
11181 arg = gimple_call_arg (call, i++);
11182 if (!validate_arg (arg, code))
11183 goto end;
11184 break;
11187 while (1);
11189 /* We need gotos here since we can only have one VA_CLOSE in a
11190 function. */
11191 end: ;
11192 va_end (ap);
11194 return res;
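
/* Illustrative sketch (not part of builtins.c): a typical use of
   validate_gimple_arglist.  VOID_TYPE terminates an exact signature; a
   trailing 0 would instead allow extra arguments.  Checking a call to
   sincos (double, double *, double *) might look like:  */

static bool
check_sincos_call (const gcall *call)
{
  return validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
				  POINTER_TYPE, VOID_TYPE);
}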
11197 /* Default target-specific builtin expander that does nothing. */
11199 rtx
11200 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11201 rtx target ATTRIBUTE_UNUSED,
11202 rtx subtarget ATTRIBUTE_UNUSED,
11203 machine_mode mode ATTRIBUTE_UNUSED,
11204 int ignore ATTRIBUTE_UNUSED)
11206 return NULL_RTX;
11209 /* Returns true if EXP represents data that would potentially reside
11210 in a readonly section. */
11212 bool
11213 readonly_data_expr (tree exp)
11215 STRIP_NOPS (exp);
11217 if (TREE_CODE (exp) != ADDR_EXPR)
11218 return false;
11220 exp = get_base_address (TREE_OPERAND (exp, 0));
11221 if (!exp)
11222 return false;
11224 /* Make sure we call decl_readonly_section only for trees it
11225 can handle (since it returns true for everything it doesn't
11226 understand). */
11227 if (TREE_CODE (exp) == STRING_CST
11228 || TREE_CODE (exp) == CONSTRUCTOR
11229 || (VAR_P (exp) && TREE_STATIC (exp)))
11230 return decl_readonly_section (exp, 0);
11231 else
11232 return false;
11235 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11236 to the call, and TYPE is its return type.
11238 Return NULL_TREE if no simplification was possible, otherwise return the
11239 simplified form of the call as a tree.
11241 The simplified form may be a constant or other expression which
11242 computes the same value, but in a more efficient manner (including
11243 calls to other builtin functions).
11245 The call may contain arguments which need to be evaluated, but
11246 which are not useful to determine the result of the call. In
11247 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11248 COMPOUND_EXPR will be an argument which must be evaluated.
11249 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11250 COMPOUND_EXPR in the chain will contain the tree for the simplified
11251 form of the builtin function call. */
11253 static tree
11254 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11256 if (!validate_arg (s1, POINTER_TYPE)
11257 || !validate_arg (s2, POINTER_TYPE))
11258 return NULL_TREE;
11260 tree fn;
11261 const char *p1, *p2;
11263 p2 = c_getstr (s2);
11264 if (p2 == NULL)
11265 return NULL_TREE;
11267 p1 = c_getstr (s1);
11268 if (p1 != NULL)
11270 const char *r = strpbrk (p1, p2);
11271 tree tem;
11273 if (r == NULL)
11274 return build_int_cst (TREE_TYPE (s1), 0);
11276 /* Return an offset into the constant string argument. */
11277 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11278 return fold_convert_loc (loc, type, tem);
11281 if (p2[0] == '\0')
11282 /* strpbrk(x, "") == NULL.
11283 Evaluate and ignore s1 in case it had side-effects. */
11284 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11286 if (p2[1] != '\0')
11287 return NULL_TREE; /* Really call strpbrk. */
11289 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11290 if (!fn)
11291 return NULL_TREE;
11293 /* New argument list transforming strpbrk(s1, s2) to
11294 strchr(s1, s2[0]). */
11295 return build_call_expr_loc (loc, fn, 2, s1,
11296 build_int_cst (integer_type_node, p2[0]));
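
/* Illustrative sketch (not part of builtins.c): source-level effect of
   the strpbrk folding above.  */

#include <string.h>

const char *
strpbrk_demo (const char *s)
{
  const char *a = strpbrk ("hello", "lo");  /* folded to "hello" + 2 */
  const char *b = strpbrk (s, "");          /* folded to NULL */
  const char *c = strpbrk (s, "x");         /* becomes strchr (s, 'x') */
  (void) a;
  (void) b;
  return c;
}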
11299 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11300 to the call.
11302 Return NULL_TREE if no simplification was possible, otherwise return the
11303 simplified form of the call as a tree.
11305 The simplified form may be a constant or other expression which
11306 computes the same value, but in a more efficient manner (including
11307 calls to other builtin functions).
11309 The call may contain arguments which need to be evaluated, but
11310 which are not useful to determine the result of the call. In
11311 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11312 COMPOUND_EXPR will be an argument which must be evaluated.
11313 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11314 COMPOUND_EXPR in the chain will contain the tree for the simplified
11315 form of the builtin function call. */
11317 static tree
11318 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
11320 if (!validate_arg (s1, POINTER_TYPE)
11321 || !validate_arg (s2, POINTER_TYPE))
11322 return NULL_TREE;
11324 if (!check_nul_terminated_array (expr, s1)
11325 || !check_nul_terminated_array (expr, s2))
11326 return NULL_TREE;
11328 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11330 /* If either argument is "", return NULL_TREE. */
11331 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11332 /* Evaluate and ignore both arguments in case either one has
11333 side-effects. */
11334 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11335 s1, s2);
11336 return NULL_TREE;
11339 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11340 to the call.
11342 Return NULL_TREE if no simplification was possible, otherwise return the
11343 simplified form of the call as a tree.
11345 The simplified form may be a constant or other expression which
11346 computes the same value, but in a more efficient manner (including
11347 calls to other builtin functions).
11349 The call may contain arguments which need to be evaluated, but
11350 which are not useful to determine the result of the call. In
11351 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11352 COMPOUND_EXPR will be an argument which must be evaluated.
11353 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11354 COMPOUND_EXPR in the chain will contain the tree for the simplified
11355 form of the builtin function call. */
11357 static tree
11358 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
11360 if (!validate_arg (s1, POINTER_TYPE)
11361 || !validate_arg (s2, POINTER_TYPE))
11362 return NULL_TREE;
11364 if (!check_nul_terminated_array (expr, s1)
11365 || !check_nul_terminated_array (expr, s2))
11366 return NULL_TREE;
11368 /* If the first argument is "", return NULL_TREE. */
11369 const char *p1 = c_getstr (s1);
11370 if (p1 && *p1 == '\0')
11372 /* Evaluate and ignore argument s2 in case it has
11373 side-effects. */
11374 return omit_one_operand_loc (loc, size_type_node,
11375 size_zero_node, s2);
11378 /* If the second argument is "", return __builtin_strlen(s1). */
11379 const char *p2 = c_getstr (s2);
11380 if (p2 && *p2 == '\0')
11382 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11384 /* If the replacement _DECL isn't initialized, don't do the
11385 transformation. */
11386 if (!fn)
11387 return NULL_TREE;
11389 return build_call_expr_loc (loc, fn, 1, s1);
11391 return NULL_TREE;
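
/* Illustrative sketch (not part of builtins.c): source-level effect of
   the strspn/strcspn folders above.  */

#include <string.h>

size_t
span_demo (const char *s)
{
  size_t a = strspn (s, "");   /* folded to 0 */
  size_t b = strspn ("", s);   /* folded to 0 */
  size_t c = strcspn (s, "");  /* becomes strlen (s) */
  return a + b + c;
}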
11394 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11395 produced. False otherwise. This is done so that we don't output the error
11396 or warning twice or three times. */
11398 bool
11399 fold_builtin_next_arg (tree exp, bool va_start_p)
11401 tree fntype = TREE_TYPE (current_function_decl);
11402 int nargs = call_expr_nargs (exp);
11403 tree arg;
11404 /* There is a good chance the current input_location points inside the
11405 definition of the va_start macro (perhaps on the token for
11406 builtin) in a system header, so warnings will not be emitted.
11407 Use the location in real source code. */
11408 location_t current_location =
11409 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11410 NULL);
11412 if (!stdarg_p (fntype))
11414 error ("%<va_start%> used in function with fixed arguments");
11415 return true;
11418 if (va_start_p)
11420 if (va_start_p && (nargs != 2))
11422 error ("wrong number of arguments to function %<va_start%>");
11423 return true;
11425 arg = CALL_EXPR_ARG (exp, 1);
11427 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11428 once we have checked the arguments and, if needed, issued a warning. */
11429 else
11431 if (nargs == 0)
11433 /* Evidently an out of date version of <stdarg.h>; can't validate
11434 va_start's second argument, but can still work as intended. */
11435 warning_at (current_location,
11436 OPT_Wvarargs,
11437 "%<__builtin_next_arg%> called without an argument");
11438 return true;
11440 else if (nargs > 1)
11442 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11443 return true;
11445 arg = CALL_EXPR_ARG (exp, 0);
11448 if (TREE_CODE (arg) == SSA_NAME)
11449 arg = SSA_NAME_VAR (arg);
11451 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11452 or __builtin_next_arg (0) the first time we see it, after checking
11453 the arguments and if needed issuing a warning. */
11454 if (!integer_zerop (arg))
11456 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11458 /* Strip off all nops for the sake of the comparison. This
11459 is not quite the same as STRIP_NOPS. It does more.
11460 We must also strip off INDIRECT_EXPR for C++ reference
11461 parameters. */
11462 while (CONVERT_EXPR_P (arg)
11463 || TREE_CODE (arg) == INDIRECT_REF)
11464 arg = TREE_OPERAND (arg, 0);
11465 if (arg != last_parm)
11467 /* FIXME: Sometimes the tree optimizers give us something other
11468 than the last argument even though the user did use the last
11469 argument. We just warn and proceed as if it were the last
11470 argument, so wrong code may be generated because of
11471 it. */
11472 warning_at (current_location,
11473 OPT_Wvarargs,
11474 "second parameter of %<va_start%> not last named argument");
11477 /* Undefined by C99 7.15.1.4p4 (va_start):
11478 "If the parameter parmN is declared with the register storage
11479 class, with a function or array type, or with a type that is
11480 not compatible with the type that results after application of
11481 the default argument promotions, the behavior is undefined."
11482 */
11483 else if (DECL_REGISTER (arg))
11485 warning_at (current_location,
11486 OPT_Wvarargs,
11487 "undefined behavior when second parameter of "
11488 "%<va_start%> is declared with %<register%> storage");
11491 /* We want to verify the second parameter just once before the tree
11492 optimizers are run and then avoid keeping it in the tree,
11493 as otherwise we could warn even for correct code like:
11494 void foo (int i, ...)
11495 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11496 if (va_start_p)
11497 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11498 else
11499 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11501 return false;
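
/* Illustrative sketch (not part of builtins.c): what the va_start
   checking above accepts and diagnoses.  */

#include <stdarg.h>

void
va_demo (int first, int last, ...)
{
  va_list ap;

  va_start (ap, last);   /* OK: last named argument */
  va_end (ap);

  va_start (ap, first);  /* -Wvarargs: second parameter of va_start
			    not last named argument */
  va_end (ap);
}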
11505 /* Expand a call EXP to __builtin_object_size. */
11507 static rtx
11508 expand_builtin_object_size (tree exp)
11510 tree ost;
11511 int object_size_type;
11512 tree fndecl = get_callee_fndecl (exp);
11514 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11516 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11517 exp, fndecl);
11518 expand_builtin_trap ();
11519 return const0_rtx;
11522 ost = CALL_EXPR_ARG (exp, 1);
11523 STRIP_NOPS (ost);
11525 if (TREE_CODE (ost) != INTEGER_CST
11526 || tree_int_cst_sgn (ost) < 0
11527 || compare_tree_int (ost, 3) > 0)
11529 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11530 exp, fndecl);
11531 expand_builtin_trap ();
11532 return const0_rtx;
11535 object_size_type = tree_to_shwi (ost);
11537 return object_size_type < 2 ? constm1_rtx : const0_rtx;
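
/* Illustrative sketch (not part of builtins.c): the "unknown" answers
   produced by the expander above, alongside a known case folded earlier
   by fold_builtin_object_size.  */

#include <stddef.h>

size_t
objsize_demo (char *unknown)
{
  char buf[64];
  size_t k = __builtin_object_size (buf, 0);      /* folded to 64 */
  size_t u0 = __builtin_object_size (unknown, 0); /* (size_t) -1 */
  size_t u2 = __builtin_object_size (unknown, 2); /* 0 */
  return k + u0 + u2;
}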
11540 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11541 FCODE is the BUILT_IN_* to use.
11542 Return NULL_RTX if we failed; the caller should emit a normal call,
11543 otherwise try to get the result in TARGET, if convenient (and in
11544 mode MODE if that's convenient). */
11546 static rtx
11547 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11548 enum built_in_function fcode)
11550 if (!validate_arglist (exp,
11551 POINTER_TYPE,
11552 fcode == BUILT_IN_MEMSET_CHK
11553 ? INTEGER_TYPE : POINTER_TYPE,
11554 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11555 return NULL_RTX;
11557 tree dest = CALL_EXPR_ARG (exp, 0);
11558 tree src = CALL_EXPR_ARG (exp, 1);
11559 tree len = CALL_EXPR_ARG (exp, 2);
11560 tree size = CALL_EXPR_ARG (exp, 3);
11562 /* FIXME: Set access mode to write only for memset et al. */
11563 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11564 /*srcstr=*/NULL_TREE, size, access_read_write);
11566 if (!tree_fits_uhwi_p (size))
11567 return NULL_RTX;
11569 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11571 /* Avoid transforming the checking call to an ordinary one when
11572 an overflow has been detected or when the call couldn't be
11573 validated because the size is not constant. */
11574 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11575 return NULL_RTX;
11577 tree fn = NULL_TREE;
11578 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11579 mem{cpy,pcpy,move,set} is available. */
11580 switch (fcode)
11582 case BUILT_IN_MEMCPY_CHK:
11583 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11584 break;
11585 case BUILT_IN_MEMPCPY_CHK:
11586 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11587 break;
11588 case BUILT_IN_MEMMOVE_CHK:
11589 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11590 break;
11591 case BUILT_IN_MEMSET_CHK:
11592 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11593 break;
11594 default:
11595 break;
11598 if (! fn)
11599 return NULL_RTX;
11601 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11602 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11603 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11604 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11606 else if (fcode == BUILT_IN_MEMSET_CHK)
11607 return NULL_RTX;
11608 else
11610 unsigned int dest_align = get_pointer_alignment (dest);
11612 /* If DEST is not a pointer type, call the normal function. */
11613 if (dest_align == 0)
11614 return NULL_RTX;
11616 /* If SRC and DEST are the same (and not volatile), do nothing. */
11617 if (operand_equal_p (src, dest, 0))
11619 tree expr;
11621 if (fcode != BUILT_IN_MEMPCPY_CHK)
11623 /* Evaluate and ignore LEN in case it has side-effects. */
11624 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11625 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11628 expr = fold_build_pointer_plus (dest, len);
11629 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11632 /* __memmove_chk special case. */
11633 if (fcode == BUILT_IN_MEMMOVE_CHK)
11635 unsigned int src_align = get_pointer_alignment (src);
11637 if (src_align == 0)
11638 return NULL_RTX;
11640 /* If src is categorized for a readonly section we can use
11641 normal __memcpy_chk. */
11642 if (readonly_data_expr (src))
11644 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11645 if (!fn)
11646 return NULL_RTX;
11647 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11648 dest, src, len, size);
11649 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11650 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11651 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11654 return NULL_RTX;
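
/* Illustrative sketch (not part of builtins.c): source-level effect of
   the _chk expansion above.  */

void *
memcpy_chk_demo (void *d, const void *s)
{
  /* Length 16 fits in the object size 32: expands as a plain
     memcpy (d, s, 16).  With 48 instead of 16, the checking call would
     be kept (and a warning emitted) so the runtime check still fires.  */
  return __builtin___memcpy_chk (d, s, 16, 32);
}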
11658 /* Emit warning if a buffer overflow is detected at compile time. */
11660 static void
11661 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11663 /* The source string. */
11664 tree srcstr = NULL_TREE;
11665 /* The size of the destination object returned by __builtin_object_size. */
11666 tree objsize = NULL_TREE;
11667 /* The string that is being concatenated with (as in __strcat_chk)
11668 or null if it isn't. */
11669 tree catstr = NULL_TREE;
11670 /* The maximum length of the source sequence in a bounded operation
11671 (such as __strncat_chk) or null if the operation isn't bounded
11672 (such as __strcat_chk). */
11673 tree maxread = NULL_TREE;
11674 /* The exact size of the access (such as in __strncpy_chk). */
11675 tree size = NULL_TREE;
11676 /* The access by the function that's checked. Except for snprintf
11677 both writing and reading are checked. */
11678 access_mode mode = access_read_write;
11680 switch (fcode)
11682 case BUILT_IN_STRCPY_CHK:
11683 case BUILT_IN_STPCPY_CHK:
11684 srcstr = CALL_EXPR_ARG (exp, 1);
11685 objsize = CALL_EXPR_ARG (exp, 2);
11686 break;
11688 case BUILT_IN_STRCAT_CHK:
11689 /* For __strcat_chk the warning will be emitted only if overflowing
11690 by at least strlen (dest) + 1 bytes. */
11691 catstr = CALL_EXPR_ARG (exp, 0);
11692 srcstr = CALL_EXPR_ARG (exp, 1);
11693 objsize = CALL_EXPR_ARG (exp, 2);
11694 break;
11696 case BUILT_IN_STRNCAT_CHK:
11697 catstr = CALL_EXPR_ARG (exp, 0);
11698 srcstr = CALL_EXPR_ARG (exp, 1);
11699 maxread = CALL_EXPR_ARG (exp, 2);
11700 objsize = CALL_EXPR_ARG (exp, 3);
11701 break;
11703 case BUILT_IN_STRNCPY_CHK:
11704 case BUILT_IN_STPNCPY_CHK:
11705 srcstr = CALL_EXPR_ARG (exp, 1);
11706 size = CALL_EXPR_ARG (exp, 2);
11707 objsize = CALL_EXPR_ARG (exp, 3);
11708 break;
11710 case BUILT_IN_SNPRINTF_CHK:
11711 case BUILT_IN_VSNPRINTF_CHK:
11712 maxread = CALL_EXPR_ARG (exp, 1);
11713 objsize = CALL_EXPR_ARG (exp, 3);
11714 /* The only checked access is the write to the destination. */
11715 mode = access_write_only;
11716 break;
11717 default:
11718 gcc_unreachable ();
11721 if (catstr && maxread)
11723 /* Check __strncat_chk. There is no way to determine the length
11724 of the string to which the source string is being appended so
11725 just warn when the length of the source string is not known. */
11726 check_strncat_sizes (exp, objsize);
11727 return;
11730 check_access (exp, size, maxread, srcstr, objsize, mode);
11733 /* Emit warning if a buffer overflow is detected at compile time
11734 in __sprintf_chk/__vsprintf_chk calls. */
11736 static void
11737 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11739 tree size, len, fmt;
11740 const char *fmt_str;
11741 int nargs = call_expr_nargs (exp);
11743 /* Verify the required arguments in the original call. */
11745 if (nargs < 4)
11746 return;
11747 size = CALL_EXPR_ARG (exp, 2);
11748 fmt = CALL_EXPR_ARG (exp, 3);
11750 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11751 return;
11753 /* Check whether the format is a literal string constant. */
11754 fmt_str = c_getstr (fmt);
11755 if (fmt_str == NULL)
11756 return;
11758 if (!init_target_chars ())
11759 return;
11761 /* If the format doesn't contain % args or %%, we know its size. */
11762 if (strchr (fmt_str, target_percent) == 0)
11763 len = build_int_cstu (size_type_node, strlen (fmt_str));
11764 /* If the format is "%s" and first ... argument is a string literal,
11765 we know it too. */
11766 else if (fcode == BUILT_IN_SPRINTF_CHK
11767 && strcmp (fmt_str, target_percent_s) == 0)
11769 tree arg;
11771 if (nargs < 5)
11772 return;
11773 arg = CALL_EXPR_ARG (exp, 4);
11774 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11775 return;
11777 len = c_strlen (arg, 1);
11778 if (!len || ! tree_fits_uhwi_p (len))
11779 return;
11781 else
11782 return;
11784 /* Add one for the terminating nul. */
11785 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11787 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11788 access_write_only);
11791 /* Emit a warning if free is called with the address of a variable. */
11793 static void
11794 maybe_emit_free_warning (tree exp)
11796 if (call_expr_nargs (exp) != 1)
11797 return;
11799 tree arg = CALL_EXPR_ARG (exp, 0);
11801 STRIP_NOPS (arg);
11802 if (TREE_CODE (arg) != ADDR_EXPR)
11803 return;
11805 arg = get_base_address (TREE_OPERAND (arg, 0));
11806 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11807 return;
11809 if (SSA_VAR_P (arg))
11810 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11811 "%Kattempt to free a non-heap object %qD", exp, arg);
11812 else
11813 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11814 "%Kattempt to free a non-heap object", exp);
11817 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11818 if possible. */
11820 static tree
11821 fold_builtin_object_size (tree ptr, tree ost)
11823 unsigned HOST_WIDE_INT bytes;
11824 int object_size_type;
11826 if (!validate_arg (ptr, POINTER_TYPE)
11827 || !validate_arg (ost, INTEGER_TYPE))
11828 return NULL_TREE;
11830 STRIP_NOPS (ost);
11832 if (TREE_CODE (ost) != INTEGER_CST
11833 || tree_int_cst_sgn (ost) < 0
11834 || compare_tree_int (ost, 3) > 0)
11835 return NULL_TREE;
11837 object_size_type = tree_to_shwi (ost);
11839 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11840 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11841 and (size_t) 0 for types 2 and 3. */
11842 if (TREE_SIDE_EFFECTS (ptr))
11843 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11845 if (TREE_CODE (ptr) == ADDR_EXPR)
11847 compute_builtin_object_size (ptr, object_size_type, &bytes);
11848 if (wi::fits_to_tree_p (bytes, size_type_node))
11849 return build_int_cstu (size_type_node, bytes);
11851 else if (TREE_CODE (ptr) == SSA_NAME)
11853 /* If object size is not known yet, delay folding until
11854 later. Maybe subsequent passes will help determining
11855 it. */
11856 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11857 && wi::fits_to_tree_p (bytes, size_type_node))
11858 return build_int_cstu (size_type_node, bytes);
11861 return NULL_TREE;
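
/* Illustrative sketch (not part of builtins.c): the side-effects rule
   documented above means the argument expression is never evaluated.  */

#include <stddef.h>

size_t
objsize_side_effect_demo (char **p)
{
  /* Folds directly to (size_t) -1; *p++ is NOT evaluated, so p is
     left unchanged.  */
  return __builtin_object_size (*p++, 0);
}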
11864 /* Builtins with folding operations that operate on "..." arguments
11865 need special handling; we need to store the arguments in a convenient
11866 data structure before attempting any folding. Fortunately there are
11867 only a few builtins that fall into this category. FNDECL is the
11868 function and ARGS is the array of its NARGS arguments. */
11870 static tree
11871 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11873 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11874 tree ret = NULL_TREE;
11876 switch (fcode)
11878 case BUILT_IN_FPCLASSIFY:
11879 ret = fold_builtin_fpclassify (loc, args, nargs);
11880 break;
11882 default:
11883 break;
11885 if (ret)
11887 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11888 SET_EXPR_LOCATION (ret, loc);
11889 TREE_NO_WARNING (ret) = 1;
11890 return ret;
11892 return NULL_TREE;
11895 /* Initialize format string characters in the target charset. */
11897 bool
11898 init_target_chars (void)
11900 static bool init;
11901 if (!init)
11903 target_newline = lang_hooks.to_target_charset ('\n');
11904 target_percent = lang_hooks.to_target_charset ('%');
11905 target_c = lang_hooks.to_target_charset ('c');
11906 target_s = lang_hooks.to_target_charset ('s');
11907 if (target_newline == 0 || target_percent == 0 || target_c == 0
11908 || target_s == 0)
11909 return false;
11911 target_percent_c[0] = target_percent;
11912 target_percent_c[1] = target_c;
11913 target_percent_c[2] = '\0';
11915 target_percent_s[0] = target_percent;
11916 target_percent_s[1] = target_s;
11917 target_percent_s[2] = '\0';
11919 target_percent_s_newline[0] = target_percent;
11920 target_percent_s_newline[1] = target_s;
11921 target_percent_s_newline[2] = target_newline;
11922 target_percent_s_newline[3] = '\0';
11924 init = true;
11926 return true;
11929 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11930 and no overflow/underflow occurred. INEXACT is true if M was not
11931 exactly calculated. TYPE is the tree type for the result. This
11932 function assumes that you cleared the MPFR flags and then
11933 calculated M to see if anything subsequently set a flag prior to
11934 entering this function. Return NULL_TREE if any checks fail. */
11936 static tree
11937 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11939 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11940 overflow/underflow occurred. If -frounding-math, proceed iff the
11941 result of calling FUNC was exact. */
11942 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11943 && (!flag_rounding_math || !inexact))
11945 REAL_VALUE_TYPE rr;
11947 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11948 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11949 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11950 but the mpfr_t is not, then we underflowed in the
11951 conversion. */
11952 if (real_isfinite (&rr)
11953 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11955 REAL_VALUE_TYPE rmode;
11957 real_convert (&rmode, TYPE_MODE (type), &rr);
11958 /* Proceed iff the specified mode can hold the value. */
11959 if (real_identical (&rmode, &rr))
11960 return build_real (type, rmode);
11963 return NULL_TREE;
11966 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11967 number and no overflow/underflow occurred. INEXACT is true if M
11968 was not exactly calculated. TYPE is the tree type for the result.
11969 This function assumes that you cleared the MPFR flags and then
11970 calculated M to see if anything subsequently set a flag prior to
11971 entering this function. Return NULL_TREE if any checks fail, if
11972 FORCE_CONVERT is true, then bypass the checks. */
11974 static tree
11975 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11977 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11978 overflow/underflow occurred. If -frounding-math, proceed iff the
11979 result of calling FUNC was exact. */
11980 if (force_convert
11981 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11982 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11983 && (!flag_rounding_math || !inexact)))
11985 REAL_VALUE_TYPE re, im;
11987 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11988 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11989 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11990 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11991 but the mpfr_t is not, then we underflowed in the
11992 conversion. */
11993 if (force_convert
11994 || (real_isfinite (&re) && real_isfinite (&im)
11995 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11996 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11998 REAL_VALUE_TYPE re_mode, im_mode;
12000 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12001 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12002 /* Proceed iff the specified mode can hold the value. */
12003 if (force_convert
12004 || (real_identical (&re_mode, &re)
12005 && real_identical (&im_mode, &im)))
12006 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12007 build_real (TREE_TYPE (type), im_mode));
12010 return NULL_TREE;
12013 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12014 the pointer *(ARG_QUO) and return the result. The type is taken
12015 from the type of ARG0 and is used for setting the precision of the
12016 calculation and results. */
12018 static tree
12019 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12021 tree const type = TREE_TYPE (arg0);
12022 tree result = NULL_TREE;
12024 STRIP_NOPS (arg0);
12025 STRIP_NOPS (arg1);
12027 /* To proceed, MPFR must exactly represent the target floating point
12028 format, which only happens when the target base equals two. */
12029 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12030 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12031 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12033 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12034 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12036 if (real_isfinite (ra0) && real_isfinite (ra1))
12038 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12039 const int prec = fmt->p;
12040 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12041 tree result_rem;
12042 long integer_quo;
12043 mpfr_t m0, m1;
12045 mpfr_inits2 (prec, m0, m1, NULL);
12046 mpfr_from_real (m0, ra0, MPFR_RNDN);
12047 mpfr_from_real (m1, ra1, MPFR_RNDN);
12048 mpfr_clear_flags ();
12049 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12050 /* Remquo is independent of the rounding mode, so pass
12051 inexact=0 to do_mpfr_ckconv(). */
12052 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12053 mpfr_clears (m0, m1, NULL);
12054 if (result_rem)
12056 /* MPFR calculates quo in the host's long so it may
12057 return more bits in quo than the target int can hold
12058 if sizeof(host long) > sizeof(target int). This can
12059 happen even for native compilers in LP64 mode. In
12060 these cases, modulo the quo value with the largest
12061 number that the target int can hold while leaving one
12062 bit for the sign. */
12063 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12064 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12066 /* Dereference the quo pointer argument. */
12067 arg_quo = build_fold_indirect_ref (arg_quo);
12068 /* Proceed iff a valid pointer type was passed in. */
12069 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12071 /* Set the value. */
12072 tree result_quo
12073 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12074 build_int_cst (TREE_TYPE (arg_quo),
12075 integer_quo));
12076 TREE_SIDE_EFFECTS (result_quo) = 1;
12077 /* Combine the quo assignment with the rem. */
12078 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12079 result_quo, result_rem));
12084 return result;
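
/* Illustrative sketch (not part of builtins.c): with constant arguments
   the folder above computes both the remainder and the quotient at
   compile time via MPFR.  */

#include <math.h>

double
remquo_demo (void)
{
  int quo;
  double rem = remquo (10.0, 3.0, &quo);  /* rem == 1.0, quo == 3 */
  return rem + quo;
}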
12087 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12088 resulting value as a tree with type TYPE. The mpfr precision is
12089 set to the precision of TYPE. We assume that this mpfr function
12090 returns zero if the result could be calculated exactly within the
12091 requested precision. In addition, the integer pointer represented
12092 by ARG_SG will be dereferenced and set to the appropriate signgam
12093 (-1,1) value. */
12095 static tree
12096 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12098 tree result = NULL_TREE;
12100 STRIP_NOPS (arg);
12102 /* To proceed, MPFR must exactly represent the target floating point
12103 format, which only happens when the target base equals two. Also
12104 verify ARG is a constant and that ARG_SG is an int pointer. */
12105 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12106 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12107 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12108 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12110 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12112 /* In addition to NaN and Inf, the argument cannot be zero or a
12113 negative integer. */
12114 if (real_isfinite (ra)
12115 && ra->cl != rvc_zero
12116 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12118 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12119 const int prec = fmt->p;
12120 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12121 int inexact, sg;
12122 mpfr_t m;
12123 tree result_lg;
12125 mpfr_init2 (m, prec);
12126 mpfr_from_real (m, ra, MPFR_RNDN);
12127 mpfr_clear_flags ();
12128 inexact = mpfr_lgamma (m, &sg, m, rnd);
12129 result_lg = do_mpfr_ckconv (m, type, inexact);
12130 mpfr_clear (m);
12131 if (result_lg)
12133 tree result_sg;
12135 /* Dereference the arg_sg pointer argument. */
12136 arg_sg = build_fold_indirect_ref (arg_sg);
12137 /* Assign the signgam value into *arg_sg. */
12138 result_sg = fold_build2 (MODIFY_EXPR,
12139 TREE_TYPE (arg_sg), arg_sg,
12140 build_int_cst (TREE_TYPE (arg_sg), sg));
12141 TREE_SIDE_EFFECTS (result_sg) = 1;
12142 /* Combine the signgam assignment with the lgamma result. */
12143 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12144 result_sg, result_lg));
12149 return result;
12152 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12153 mpc function FUNC on it and return the resulting value as a tree
12154 with type TYPE. The mpfr precision is set to the precision of
12155 TYPE. We assume that function FUNC returns zero if the result
12156 could be calculated exactly within the requested precision. If
12157 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12158 in the arguments and/or results. */
12160 tree
12161 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12162 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12164 tree result = NULL_TREE;
12166 STRIP_NOPS (arg0);
12167 STRIP_NOPS (arg1);
12169 /* To proceed, MPFR must exactly represent the target floating point
12170 format, which only happens when the target base equals two. */
12171 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12173 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12175 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12177 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12178 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12179 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12180 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12182 if (do_nonfinite
12183 || (real_isfinite (re0) && real_isfinite (im0)
12184 && real_isfinite (re1) && real_isfinite (im1)))
12186 const struct real_format *const fmt =
12187 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12188 const int prec = fmt->p;
12189 const mpfr_rnd_t rnd = fmt->round_towards_zero
12190 ? MPFR_RNDZ : MPFR_RNDN;
12191 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12192 int inexact;
12193 mpc_t m0, m1;
12195 mpc_init2 (m0, prec);
12196 mpc_init2 (m1, prec);
12197 mpfr_from_real (mpc_realref (m0), re0, rnd);
12198 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12199 mpfr_from_real (mpc_realref (m1), re1, rnd);
12200 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12201 mpfr_clear_flags ();
12202 inexact = func (m0, m0, m1, crnd);
12203 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12204 mpc_clear (m0);
12205 mpc_clear (m1);
12209 return result;
12212 /* A wrapper function for builtin folding that prevents warnings for
12213 "statement without effect" and the like, caused by removing the
12214 call node earlier than the warning is generated. */
12216 tree
12217 fold_call_stmt (gcall *stmt, bool ignore)
12219 tree ret = NULL_TREE;
12220 tree fndecl = gimple_call_fndecl (stmt);
12221 location_t loc = gimple_location (stmt);
12222 if (fndecl && fndecl_built_in_p (fndecl)
12223 && !gimple_call_va_arg_pack_p (stmt))
12225 int nargs = gimple_call_num_args (stmt);
12226 tree *args = (nargs > 0
12227 ? gimple_call_arg_ptr (stmt, 0)
12228 : &error_mark_node);
12230 if (avoid_folding_inline_builtin (fndecl))
12231 return NULL_TREE;
12232 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12234 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12236 else
12238 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12239 if (ret)
12241 /* Propagate location information from original call to
12242 expansion of builtin. Otherwise things like
12243 maybe_emit_chk_warning, that operate on the expansion
12244 of a builtin, will use the wrong location information. */
12245 if (gimple_has_location (stmt))
12247 tree realret = ret;
12248 if (TREE_CODE (ret) == NOP_EXPR)
12249 realret = TREE_OPERAND (ret, 0);
12250 if (CAN_HAVE_LOCATION_P (realret)
12251 && !EXPR_HAS_LOCATION (realret))
12252 SET_EXPR_LOCATION (realret, loc);
12253 return realret;
12255 return ret;
12259 return NULL_TREE;
12262 /* Look up the function in builtin_decl that corresponds to DECL
12263 and set ASMSPEC as its user assembler name. DECL must be a
12264 function decl that declares a builtin. */
12266 void
12267 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12269 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12270 && asmspec != 0);
12272 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12273 set_user_assembler_name (builtin, asmspec);
12275 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12276 && INT_TYPE_SIZE < BITS_PER_WORD)
12278 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12279 set_user_assembler_libfunc ("ffs", asmspec);
12280 set_optab_libfunc (ffs_optab, mode, "ffs");
12284 /* Return true if DECL is a builtin that expands to a constant or similarly
12285 simple code. */
12286 bool
12287 is_simple_builtin (tree decl)
12289 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12290 switch (DECL_FUNCTION_CODE (decl))
12292 /* Builtins that expand to constants. */
12293 case BUILT_IN_CONSTANT_P:
12294 case BUILT_IN_EXPECT:
12295 case BUILT_IN_OBJECT_SIZE:
12296 case BUILT_IN_UNREACHABLE:
12297 /* Simple register moves or loads from stack. */
12298 case BUILT_IN_ASSUME_ALIGNED:
12299 case BUILT_IN_RETURN_ADDRESS:
12300 case BUILT_IN_EXTRACT_RETURN_ADDR:
12301 case BUILT_IN_FROB_RETURN_ADDR:
12302 case BUILT_IN_RETURN:
12303 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12304 case BUILT_IN_FRAME_ADDRESS:
12305 case BUILT_IN_VA_END:
12306 case BUILT_IN_STACK_SAVE:
12307 case BUILT_IN_STACK_RESTORE:
12308 /* Exception state returns or moves registers around. */
12309 case BUILT_IN_EH_FILTER:
12310 case BUILT_IN_EH_POINTER:
12311 case BUILT_IN_EH_COPY_VALUES:
12312 return true;
12314 default:
12315 return false;
12318 return false;
12321 /* Return true if DECL is a builtin that is not expensive, i.e., it is
12322 most probably expanded inline into reasonably simple code. This is a
12323 superset of is_simple_builtin. */
12324 bool
12325 is_inexpensive_builtin (tree decl)
12327 if (!decl)
12328 return false;
12329 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12330 return true;
12331 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12332 switch (DECL_FUNCTION_CODE (decl))
12334 case BUILT_IN_ABS:
12335 CASE_BUILT_IN_ALLOCA:
12336 case BUILT_IN_BSWAP16:
12337 case BUILT_IN_BSWAP32:
12338 case BUILT_IN_BSWAP64:
12339 case BUILT_IN_BSWAP128:
12340 case BUILT_IN_CLZ:
12341 case BUILT_IN_CLZIMAX:
12342 case BUILT_IN_CLZL:
12343 case BUILT_IN_CLZLL:
12344 case BUILT_IN_CTZ:
12345 case BUILT_IN_CTZIMAX:
12346 case BUILT_IN_CTZL:
12347 case BUILT_IN_CTZLL:
12348 case BUILT_IN_FFS:
12349 case BUILT_IN_FFSIMAX:
12350 case BUILT_IN_FFSL:
12351 case BUILT_IN_FFSLL:
12352 case BUILT_IN_IMAXABS:
12353 case BUILT_IN_FINITE:
12354 case BUILT_IN_FINITEF:
12355 case BUILT_IN_FINITEL:
12356 case BUILT_IN_FINITED32:
12357 case BUILT_IN_FINITED64:
12358 case BUILT_IN_FINITED128:
12359 case BUILT_IN_FPCLASSIFY:
12360 case BUILT_IN_ISFINITE:
12361 case BUILT_IN_ISINF_SIGN:
12362 case BUILT_IN_ISINF:
12363 case BUILT_IN_ISINFF:
12364 case BUILT_IN_ISINFL:
12365 case BUILT_IN_ISINFD32:
12366 case BUILT_IN_ISINFD64:
12367 case BUILT_IN_ISINFD128:
12368 case BUILT_IN_ISNAN:
12369 case BUILT_IN_ISNANF:
12370 case BUILT_IN_ISNANL:
12371 case BUILT_IN_ISNAND32:
12372 case BUILT_IN_ISNAND64:
12373 case BUILT_IN_ISNAND128:
12374 case BUILT_IN_ISNORMAL:
12375 case BUILT_IN_ISGREATER:
12376 case BUILT_IN_ISGREATEREQUAL:
12377 case BUILT_IN_ISLESS:
12378 case BUILT_IN_ISLESSEQUAL:
12379 case BUILT_IN_ISLESSGREATER:
12380 case BUILT_IN_ISUNORDERED:
12381 case BUILT_IN_VA_ARG_PACK:
12382 case BUILT_IN_VA_ARG_PACK_LEN:
12383 case BUILT_IN_VA_COPY:
12384 case BUILT_IN_TRAP:
12385 case BUILT_IN_SAVEREGS:
12386 case BUILT_IN_POPCOUNTL:
12387 case BUILT_IN_POPCOUNTLL:
12388 case BUILT_IN_POPCOUNTIMAX:
12389 case BUILT_IN_POPCOUNT:
12390 case BUILT_IN_PARITYL:
12391 case BUILT_IN_PARITYLL:
12392 case BUILT_IN_PARITYIMAX:
12393 case BUILT_IN_PARITY:
12394 case BUILT_IN_LABS:
12395 case BUILT_IN_LLABS:
12396 case BUILT_IN_PREFETCH:
12397 case BUILT_IN_ACC_ON_DEVICE:
12398 return true;
12400 default:
12401 return is_simple_builtin (decl);
12404 return false;
12407 /* Return true if T is a constant and the value cast to a target char
12408 can be represented by a host char.
12409 Store the cast char constant in *P if so. */
12411 bool
12412 target_char_cst_p (tree t, char *p)
12414 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12415 return false;
12417 *p = (char)tree_to_uhwi (t);
12418 return true;
12421 /* Return true if the builtin DECL is implemented in a standard library.
12422 Otherwise return false, which does not guarantee that it is not (thus the list of
12423 handled builtins below may be incomplete). */
12425 bool
12426 builtin_with_linkage_p (tree decl)
12428 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12429 switch (DECL_FUNCTION_CODE (decl))
12431 CASE_FLT_FN (BUILT_IN_ACOS):
12432 CASE_FLT_FN (BUILT_IN_ACOSH):
12433 CASE_FLT_FN (BUILT_IN_ASIN):
12434 CASE_FLT_FN (BUILT_IN_ASINH):
12435 CASE_FLT_FN (BUILT_IN_ATAN):
12436 CASE_FLT_FN (BUILT_IN_ATANH):
12437 CASE_FLT_FN (BUILT_IN_ATAN2):
12438 CASE_FLT_FN (BUILT_IN_CBRT):
12439 CASE_FLT_FN (BUILT_IN_CEIL):
12440 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12441 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12442 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12443 CASE_FLT_FN (BUILT_IN_COS):
12444 CASE_FLT_FN (BUILT_IN_COSH):
12445 CASE_FLT_FN (BUILT_IN_ERF):
12446 CASE_FLT_FN (BUILT_IN_ERFC):
12447 CASE_FLT_FN (BUILT_IN_EXP):
12448 CASE_FLT_FN (BUILT_IN_EXP2):
12449 CASE_FLT_FN (BUILT_IN_EXPM1):
12450 CASE_FLT_FN (BUILT_IN_FABS):
12451 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12452 CASE_FLT_FN (BUILT_IN_FDIM):
12453 CASE_FLT_FN (BUILT_IN_FLOOR):
12454 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12455 CASE_FLT_FN (BUILT_IN_FMA):
12456 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12457 CASE_FLT_FN (BUILT_IN_FMAX):
12458 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12459 CASE_FLT_FN (BUILT_IN_FMIN):
12460 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12461 CASE_FLT_FN (BUILT_IN_FMOD):
12462 CASE_FLT_FN (BUILT_IN_FREXP):
12463 CASE_FLT_FN (BUILT_IN_HYPOT):
12464 CASE_FLT_FN (BUILT_IN_ILOGB):
12465 CASE_FLT_FN (BUILT_IN_LDEXP):
12466 CASE_FLT_FN (BUILT_IN_LGAMMA):
12467 CASE_FLT_FN (BUILT_IN_LLRINT):
12468 CASE_FLT_FN (BUILT_IN_LLROUND):
12469 CASE_FLT_FN (BUILT_IN_LOG):
12470 CASE_FLT_FN (BUILT_IN_LOG10):
12471 CASE_FLT_FN (BUILT_IN_LOG1P):
12472 CASE_FLT_FN (BUILT_IN_LOG2):
12473 CASE_FLT_FN (BUILT_IN_LOGB):
12474 CASE_FLT_FN (BUILT_IN_LRINT):
12475 CASE_FLT_FN (BUILT_IN_LROUND):
12476 CASE_FLT_FN (BUILT_IN_MODF):
12477 CASE_FLT_FN (BUILT_IN_NAN):
12478 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12479 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12480 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12481 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12482 CASE_FLT_FN (BUILT_IN_POW):
12483 CASE_FLT_FN (BUILT_IN_REMAINDER):
12484 CASE_FLT_FN (BUILT_IN_REMQUO):
12485 CASE_FLT_FN (BUILT_IN_RINT):
12486 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12487 CASE_FLT_FN (BUILT_IN_ROUND):
12488 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12489 CASE_FLT_FN (BUILT_IN_SCALBLN):
12490 CASE_FLT_FN (BUILT_IN_SCALBN):
12491 CASE_FLT_FN (BUILT_IN_SIN):
12492 CASE_FLT_FN (BUILT_IN_SINH):
12493 CASE_FLT_FN (BUILT_IN_SINCOS):
12494 CASE_FLT_FN (BUILT_IN_SQRT):
12495 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12496 CASE_FLT_FN (BUILT_IN_TAN):
12497 CASE_FLT_FN (BUILT_IN_TANH):
12498 CASE_FLT_FN (BUILT_IN_TGAMMA):
12499 CASE_FLT_FN (BUILT_IN_TRUNC):
12500 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12501 return true;
12502 default:
12503 break;
12505 return false;