Improve costs for DImode shifts of integer constants.
[official-gcc.git] / gcc / builtins.c
blob 8b9a4a4d948b37a2aa438bc2a59d66b398850413
/* Expand builtin functions.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree compute_objsize (tree, int, access_ref *, const vr_values * = NULL);
static bool get_range (tree, signop, offset_int[2], const vr_values * = NULL);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, true))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
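
/* A worked example of the constructor's contract (illustrative only):
   access_ref () leaves the bounds at their defaults, [0, HOST_WIDE_INT_M1U],
   while access_ref (bound, /*minaccess=*/true) with BOUND known to be the
   constant 5 sets bndrng to [1, 5]: at least one byte is accessed but no
   more than five.  */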

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
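
/* To illustrate the M/N contract above: for a pointer known to be 4 bytes
   past an 8-byte boundary, get_pointer_alignment_1 stores M == 64 bits in
   *ALIGNP and N == 32 bits in *BITPOSP, and get_pointer_alignment then
   returns least_bit_hwi (32) == 32, the largest power-of-two alignment
   that is still guaranteed.  */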

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
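
/* For example, string_length ("ab\0cd", 1, 5) returns 2; likewise for a
   wide string of 4-byte elements "a\0\0\0" "b\0\0\0" "\0\0\0\0" the call
   string_length (ptr, 4, 3) returns 2, comparing one 4-byte element at a
   time against zero.  */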

/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, returns false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull, also issues
   a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is true, but not more.  */
  tree size;
  /* True if SRC involves a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
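
/* For instance, c_strlen on the STRING_CST "foo" with a zero offset
   yields ssize_int (3), whereas for "foo\0bar" with a non-constant
   offset it returns NULL_TREE: the embedded NUL makes the result
   depend on where the scan would start.  */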

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
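
/* As an example, on a target where neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN is set, c_readstr ("ab", SImode) yields the constant
   0x00006261: 'a' (0x61) in the lowest byte, 'b' (0x62) next, and zeros
   once the terminating NUL is reached.  */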

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
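
/* Concretely, __builtin_return_address (0) reads the return address out
   of the current frame (or uses the target's RETURN_ADDR_RTX), while
   __builtin_frame_address (1) follows the dynamic chain once and returns
   the caller's frame address, adjusted by FRAME_ADDR_RTX where the
   target defines a bias.  */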

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the argument iterator ITER has more arguments left.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
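
/* A typical use, mirroring the calls elsewhere in this file:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   Here the trailing VOID_TYPE terminates the expected parameter list;
   a trailing 0 would instead allow any further arguments, as for
   __builtin_prefetch below.  */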

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
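
/* So __builtin_prefetch (p, 1, 3) expands to the target's prefetch
   pattern with a write hint and maximal temporal locality, while plain
   __builtin_prefetch (p) gets the defaults: read (0) and locality 3.  */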

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1736 /* Create a vector describing the result block RESULT. If SAVEP is true,
1737 the result block is used to save the values; otherwise it is used to
1738 restore the values. */
1740 static rtx
1741 result_vector (int savep, rtx result)
1743 int regno, size, align, nelts;
1744 fixed_size_mode mode;
1745 rtx reg, mem;
1746 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1748 size = nelts = 0;
1749 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1750 if ((mode = apply_result_mode[regno]) != VOIDmode)
1752 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1753 if (size % align != 0)
1754 size = CEIL (size, align) * align;
1755 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1756 mem = adjust_address (result, mode, size);
1757 savevec[nelts++] = (savep
1758 ? gen_rtx_SET (mem, reg)
1759 : gen_rtx_SET (reg, mem));
1760 size += GET_MODE_SIZE (mode);
1762 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1765 /* Save the state required to perform an untyped call with the same
1766 arguments as were passed to the current function. */
1768 static rtx
1769 expand_builtin_apply_args_1 (void)
1771 rtx registers, tem;
1772 int size, align, regno;
1773 fixed_size_mode mode;
1774 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1776 /* Create a block where the arg-pointer, structure value address,
1777 and argument registers can be saved. */
1778 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1780 /* Walk past the arg-pointer and structure value address. */
1781 size = GET_MODE_SIZE (Pmode);
1782 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1783 size += GET_MODE_SIZE (Pmode);
1785 /* Save each register used in calling a function to the block. */
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_args_mode[regno]) != VOIDmode)
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1793 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1795 emit_move_insn (adjust_address (registers, mode, size), tem);
1796 size += GET_MODE_SIZE (mode);
1799 /* Save the arg pointer to the block. */
1800 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1801 /* We need the pointer as the caller actually passed the arguments, not
1802 as we might have pretended they were passed. Make sure it's a valid
1803 operand, as emit_move_insn isn't expected to handle a PLUS. */
1804 if (STACK_GROWS_DOWNWARD)
1806 tem = force_operand (plus_constant (Pmode, tem,
1807 crtl->args.pretend_args_size),
1808 NULL_RTX);
1809 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1811 size = GET_MODE_SIZE (Pmode);
1813 /* Save the structure value address unless this is passed as an
1814 "invisible" first argument. */
1815 if (struct_incoming_value)
1816 emit_move_insn (adjust_address (registers, Pmode, size),
1817 copy_to_reg (struct_incoming_value));
1819 /* Return the address of the block. */
1820 return copy_addr_to_reg (XEXP (registers, 0));
1823 /* __builtin_apply_args returns block of memory allocated on
1824 the stack into which is stored the arg pointer, structure
1825 value address, static chain, and all the registers that might
1826 possibly be used in performing a function call. The code is
1827 moved to the start of the function so the incoming values are
1828 saved. */
1830 static rtx
1831 expand_builtin_apply_args (void)
1833 /* Don't do __builtin_apply_args more than once in a function.
1834 Save the result of the first call and reuse it. */
1835 if (apply_args_value != 0)
1836 return apply_args_value;
1838 /* When this function is called, it means that registers must be
1839 saved on entry to this function. So we migrate the
1840 call to the first insn of this function. */
1841 rtx temp;
1843 start_sequence ();
1844 temp = expand_builtin_apply_args_1 ();
1845 rtx_insn *seq = get_insns ();
1846 end_sequence ();
1848 apply_args_value = temp;
1850 /* Put the insns after the NOTE that starts the function.
1851 If this is inside a start_sequence, make the outer-level insn
1852 chain current, so the code is placed at the start of the
1853 function. If internal_arg_pointer is a non-virtual pseudo,
1854 it needs to be placed after the function that initializes
1855 that pseudo. */
1856 push_topmost_sequence ();
1857 if (REG_P (crtl->args.internal_arg_pointer)
1858 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1859 emit_insn_before (seq, parm_birth_insn);
1860 else
1861 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1862 pop_topmost_sequence ();
1863 return temp;
1867 /* Perform an untyped call and save the state required to perform an
1868 untyped return of whatever value was returned by the given function. */
1870 static rtx
1871 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1873 int size, align, regno;
1874 fixed_size_mode mode;
1875 rtx incoming_args, result, reg, dest, src;
1876 rtx_call_insn *call_insn;
1877 rtx old_stack_level = 0;
1878 rtx call_fusage = 0;
1879 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1881 arguments = convert_memory_address (Pmode, arguments);
1883 /* Create a block where the return registers can be saved. */
1884 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1886 /* Fetch the arg pointer from the ARGUMENTS block. */
1887 incoming_args = gen_reg_rtx (Pmode);
1888 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1889 if (!STACK_GROWS_DOWNWARD)
1890 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1891 incoming_args, 0, OPTAB_LIB_WIDEN);
1893 /* Push a new argument block and copy the arguments. Do not allow
1894 the (potential) memcpy call below to interfere with our stack
1895 manipulations. */
1896 do_pending_stack_adjust ();
1897 NO_DEFER_POP;
1899 /* Save the stack with nonlocal if available. */
1900 if (targetm.have_save_stack_nonlocal ())
1901 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1902 else
1903 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1905 /* Allocate a block of memory onto the stack and copy the memory
1906 arguments to the outgoing arguments address. We can pass TRUE
1907 as the 4th argument because we just saved the stack pointer
1908 and will restore it right after the call. */
1909 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1911 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1912 may have already set current_function_calls_alloca to true.
1913 current_function_calls_alloca won't be set if argsize is zero,
1914 so we have to guarantee need_drap is true here. */
1915 if (SUPPORTS_STACK_ALIGNMENT)
1916 crtl->need_drap = true;
1918 dest = virtual_outgoing_args_rtx;
1919 if (!STACK_GROWS_DOWNWARD)
1921 if (CONST_INT_P (argsize))
1922 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1923 else
1924 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1926 dest = gen_rtx_MEM (BLKmode, dest);
1927 set_mem_align (dest, PARM_BOUNDARY);
1928 src = gen_rtx_MEM (BLKmode, incoming_args);
1929 set_mem_align (src, PARM_BOUNDARY);
1930 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1932 /* Refer to the argument block. */
1933 apply_args_size ();
1934 arguments = gen_rtx_MEM (BLKmode, arguments);
1935 set_mem_align (arguments, PARM_BOUNDARY);
1937 /* Walk past the arg-pointer and structure value address. */
1938 size = GET_MODE_SIZE (Pmode);
1939 if (struct_value)
1940 size += GET_MODE_SIZE (Pmode);
1942 /* Restore each of the registers previously saved. Make USE insns
1943 for each of these registers for use in making the call. */
1944 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1945 if ((mode = apply_args_mode[regno]) != VOIDmode)
1947 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1948 if (size % align != 0)
1949 size = CEIL (size, align) * align;
1950 reg = gen_rtx_REG (mode, regno);
1951 emit_move_insn (reg, adjust_address (arguments, mode, size));
1952 use_reg (&call_fusage, reg);
1953 size += GET_MODE_SIZE (mode);
1956 /* Restore the structure value address unless this is passed as an
1957 "invisible" first argument. */
1958 size = GET_MODE_SIZE (Pmode);
1959 if (struct_value)
1961 rtx value = gen_reg_rtx (Pmode);
1962 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1963 emit_move_insn (struct_value, value);
1964 if (REG_P (struct_value))
1965 use_reg (&call_fusage, struct_value);
1968 /* All arguments and registers used for the call are set up by now! */
1969 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1971 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1972 and we don't want to load it into a register as an optimization,
1973 because prepare_call_address already did it if it should be done. */
1974 if (GET_CODE (function) != SYMBOL_REF)
1975 function = memory_address (FUNCTION_MODE, function);
1977 /* Generate the actual call instruction and save the return value. */
1978 if (targetm.have_untyped_call ())
1980 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1981 emit_call_insn (targetm.gen_untyped_call (mem, result,
1982 result_vector (1, result)));
1984 else if (targetm.have_call_value ())
1986 rtx valreg = 0;
1988 /* Locate the unique return register. It is not possible to
1989 express a call that sets more than one return register using
1990 call_value; use untyped_call for that. In fact, untyped_call
1991 only needs to save the return registers in the given block. */
1992 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1993 if ((mode = apply_result_mode[regno]) != VOIDmode)
1995 gcc_assert (!valreg); /* have_untyped_call required. */
1997 valreg = gen_rtx_REG (mode, regno);
2000 emit_insn (targetm.gen_call_value (valreg,
2001 gen_rtx_MEM (FUNCTION_MODE, function),
2002 const0_rtx, NULL_RTX, const0_rtx));
2004 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2006 else
2007 gcc_unreachable ();
2009 /* Find the CALL insn we just emitted, and attach the register usage
2010 information. */
2011 call_insn = last_call_insn ();
2012 add_function_usage_to (call_insn, call_fusage);
2014 /* Restore the stack. */
2015 if (targetm.have_save_stack_nonlocal ())
2016 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2017 else
2018 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2019 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2021 OK_DEFER_POP;
2023 /* Return the address of the result block. */
2024 result = copy_addr_to_reg (XEXP (result, 0));
2025 return convert_memory_address (ptr_mode, result);
2028 /* Perform an untyped return. */
2030 static void
2031 expand_builtin_return (rtx result)
2033 int size, align, regno;
2034 fixed_size_mode mode;
2035 rtx reg;
2036 rtx_insn *call_fusage = 0;
2038 result = convert_memory_address (Pmode, result);
2040 apply_result_size ();
2041 result = gen_rtx_MEM (BLKmode, result);
2043 if (targetm.have_untyped_return ())
2045 rtx vector = result_vector (0, result);
2046 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2047 emit_barrier ();
2048 return;
2051 /* Restore the return value and note that each value is used. */
2052 size = 0;
2053 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2054 if ((mode = apply_result_mode[regno]) != VOIDmode)
2056 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2057 if (size % align != 0)
2058 size = CEIL (size, align) * align;
2059 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2060 emit_move_insn (reg, adjust_address (result, mode, size));
2062 push_to_sequence (call_fusage);
2063 emit_use (reg);
2064 call_fusage = get_insns ();
2065 end_sequence ();
2066 size += GET_MODE_SIZE (mode);
2069 /* Put the USE insns before the return. */
2070 emit_insn (call_fusage);
2072 /* Return whatever value was restored by jumping directly to the end
2073 of the function. */
2074 expand_naked_return ();
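/* A user-level sketch tying the three expanders above together,
   following the documented GNU extension; FORWARD and TARGET_FN are
   hypothetical, and 128 is merely a safe upper bound on the size of
   the stack-passed arguments. */
double target_fn (double x, int y) { return x + y; }

double
forward (double x, int y)
{
  void *args = __builtin_apply_args ();  /* snapshot incoming registers */
  void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
  __builtin_return (ret);                /* untyped return of RET */
}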
2077 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2079 static enum type_class
2080 type_to_class (tree type)
2082 switch (TREE_CODE (type))
2084 case VOID_TYPE: return void_type_class;
2085 case INTEGER_TYPE: return integer_type_class;
2086 case ENUMERAL_TYPE: return enumeral_type_class;
2087 case BOOLEAN_TYPE: return boolean_type_class;
2088 case POINTER_TYPE: return pointer_type_class;
2089 case REFERENCE_TYPE: return reference_type_class;
2090 case OFFSET_TYPE: return offset_type_class;
2091 case REAL_TYPE: return real_type_class;
2092 case COMPLEX_TYPE: return complex_type_class;
2093 case FUNCTION_TYPE: return function_type_class;
2094 case METHOD_TYPE: return method_type_class;
2095 case RECORD_TYPE: return record_type_class;
2096 case UNION_TYPE:
2097 case QUAL_UNION_TYPE: return union_type_class;
2098 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2099 ? string_type_class : array_type_class);
2100 case LANG_TYPE: return lang_type_class;
2101 default: return no_type_class;
2105 /* Expand a call EXP to __builtin_classify_type. */
2107 static rtx
2108 expand_builtin_classify_type (tree exp)
2110 if (call_expr_nargs (exp))
2111 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2112 return GEN_INT (no_type_class);
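/* A short sketch of the user-visible behavior; each result is one of
   the type_class enumerators returned by type_to_class above, after
   the usual argument conversions. */
void
classify_examples (void)
{
  int ic = __builtin_classify_type (42);         /* integer_type_class */
  int rc = __builtin_classify_type (3.14);       /* real_type_class */
  int pc = __builtin_classify_type ((int *) 0);  /* pointer_type_class */
}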
2115 /* This helper macro, meant to be used in mathfn_built_in below, determines
2116 which among a set of builtin math functions is appropriate for a given type
2117 mode. The `F' (float) and `L' (long double) are automatically generated
2118 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2119 types, there are additional types that are considered with 'F32', 'F64',
2120 'F128', etc. suffixes. */
2121 #define CASE_MATHFN(MATHFN) \
2122 CASE_CFN_##MATHFN: \
2123 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2124 fcodel = BUILT_IN_##MATHFN##L ; break;
2125 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2126 types. */
2127 #define CASE_MATHFN_FLOATN(MATHFN) \
2128 CASE_CFN_##MATHFN: \
2129 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2130 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2131 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2132 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2133 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2134 break;
2135 /* Similar to above, but appends _R after any F/L suffix. */
2136 #define CASE_MATHFN_REENT(MATHFN) \
2137 case CFN_BUILT_IN_##MATHFN##_R: \
2138 case CFN_BUILT_IN_##MATHFN##F_R: \
2139 case CFN_BUILT_IN_##MATHFN##L_R: \
2140 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2141 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2143 /* Return a function equivalent to FN but operating on floating-point
2144 values of type TYPE, or END_BUILTINS if no such function exists.
2145 This is purely an operation on function codes; it does not guarantee
2146 that the target actually has an implementation of the function. */
2148 static built_in_function
2149 mathfn_built_in_2 (tree type, combined_fn fn)
2151 tree mtype;
2152 built_in_function fcode, fcodef, fcodel;
2153 built_in_function fcodef16 = END_BUILTINS;
2154 built_in_function fcodef32 = END_BUILTINS;
2155 built_in_function fcodef64 = END_BUILTINS;
2156 built_in_function fcodef128 = END_BUILTINS;
2157 built_in_function fcodef32x = END_BUILTINS;
2158 built_in_function fcodef64x = END_BUILTINS;
2159 built_in_function fcodef128x = END_BUILTINS;
2161 switch (fn)
2163 CASE_MATHFN (ACOS)
2164 CASE_MATHFN (ACOSH)
2165 CASE_MATHFN (ASIN)
2166 CASE_MATHFN (ASINH)
2167 CASE_MATHFN (ATAN)
2168 CASE_MATHFN (ATAN2)
2169 CASE_MATHFN (ATANH)
2170 CASE_MATHFN (CBRT)
2171 CASE_MATHFN_FLOATN (CEIL)
2172 CASE_MATHFN (CEXPI)
2173 CASE_MATHFN_FLOATN (COPYSIGN)
2174 CASE_MATHFN (COS)
2175 CASE_MATHFN (COSH)
2176 CASE_MATHFN (DREM)
2177 CASE_MATHFN (ERF)
2178 CASE_MATHFN (ERFC)
2179 CASE_MATHFN (EXP)
2180 CASE_MATHFN (EXP10)
2181 CASE_MATHFN (EXP2)
2182 CASE_MATHFN (EXPM1)
2183 CASE_MATHFN (FABS)
2184 CASE_MATHFN (FDIM)
2185 CASE_MATHFN_FLOATN (FLOOR)
2186 CASE_MATHFN_FLOATN (FMA)
2187 CASE_MATHFN_FLOATN (FMAX)
2188 CASE_MATHFN_FLOATN (FMIN)
2189 CASE_MATHFN (FMOD)
2190 CASE_MATHFN (FREXP)
2191 CASE_MATHFN (GAMMA)
2192 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2193 CASE_MATHFN (HUGE_VAL)
2194 CASE_MATHFN (HYPOT)
2195 CASE_MATHFN (ILOGB)
2196 CASE_MATHFN (ICEIL)
2197 CASE_MATHFN (IFLOOR)
2198 CASE_MATHFN (INF)
2199 CASE_MATHFN (IRINT)
2200 CASE_MATHFN (IROUND)
2201 CASE_MATHFN (ISINF)
2202 CASE_MATHFN (J0)
2203 CASE_MATHFN (J1)
2204 CASE_MATHFN (JN)
2205 CASE_MATHFN (LCEIL)
2206 CASE_MATHFN (LDEXP)
2207 CASE_MATHFN (LFLOOR)
2208 CASE_MATHFN (LGAMMA)
2209 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2210 CASE_MATHFN (LLCEIL)
2211 CASE_MATHFN (LLFLOOR)
2212 CASE_MATHFN (LLRINT)
2213 CASE_MATHFN (LLROUND)
2214 CASE_MATHFN (LOG)
2215 CASE_MATHFN (LOG10)
2216 CASE_MATHFN (LOG1P)
2217 CASE_MATHFN (LOG2)
2218 CASE_MATHFN (LOGB)
2219 CASE_MATHFN (LRINT)
2220 CASE_MATHFN (LROUND)
2221 CASE_MATHFN (MODF)
2222 CASE_MATHFN (NAN)
2223 CASE_MATHFN (NANS)
2224 CASE_MATHFN_FLOATN (NEARBYINT)
2225 CASE_MATHFN (NEXTAFTER)
2226 CASE_MATHFN (NEXTTOWARD)
2227 CASE_MATHFN (POW)
2228 CASE_MATHFN (POWI)
2229 CASE_MATHFN (POW10)
2230 CASE_MATHFN (REMAINDER)
2231 CASE_MATHFN (REMQUO)
2232 CASE_MATHFN_FLOATN (RINT)
2233 CASE_MATHFN_FLOATN (ROUND)
2234 CASE_MATHFN_FLOATN (ROUNDEVEN)
2235 CASE_MATHFN (SCALB)
2236 CASE_MATHFN (SCALBLN)
2237 CASE_MATHFN (SCALBN)
2238 CASE_MATHFN (SIGNBIT)
2239 CASE_MATHFN (SIGNIFICAND)
2240 CASE_MATHFN (SIN)
2241 CASE_MATHFN (SINCOS)
2242 CASE_MATHFN (SINH)
2243 CASE_MATHFN_FLOATN (SQRT)
2244 CASE_MATHFN (TAN)
2245 CASE_MATHFN (TANH)
2246 CASE_MATHFN (TGAMMA)
2247 CASE_MATHFN_FLOATN (TRUNC)
2248 CASE_MATHFN (Y0)
2249 CASE_MATHFN (Y1)
2250 CASE_MATHFN (YN)
2252 default:
2253 return END_BUILTINS;
2256 mtype = TYPE_MAIN_VARIANT (type);
2257 if (mtype == double_type_node)
2258 return fcode;
2259 else if (mtype == float_type_node)
2260 return fcodef;
2261 else if (mtype == long_double_type_node)
2262 return fcodel;
2263 else if (mtype == float16_type_node)
2264 return fcodef16;
2265 else if (mtype == float32_type_node)
2266 return fcodef32;
2267 else if (mtype == float64_type_node)
2268 return fcodef64;
2269 else if (mtype == float128_type_node)
2270 return fcodef128;
2271 else if (mtype == float32x_type_node)
2272 return fcodef32x;
2273 else if (mtype == float64x_type_node)
2274 return fcodef64x;
2275 else if (mtype == float128x_type_node)
2276 return fcodef128x;
2277 else
2278 return END_BUILTINS;
2281 /* Return mathematic function equivalent to FN but operating directly on TYPE,
2282 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2283 otherwise use the explicit declaration. If we can't do the conversion,
2284 return null. */
2286 static tree
2287 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2289 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2290 if (fcode2 == END_BUILTINS)
2291 return NULL_TREE;
2293 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2294 return NULL_TREE;
2296 return builtin_decl_explicit (fcode2);
2299 /* Like mathfn_built_in_1, but always use the implicit array. */
2301 tree
2302 mathfn_built_in (tree type, combined_fn fn)
2304 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2307 /* Like mathfn_built_in_1, but take a built_in_function and
2308 always use the implicit array. */
2310 tree
2311 mathfn_built_in (tree type, enum built_in_function fn)
2313 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
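/* A hedged sketch of a lookup through the tables above: asking for the
   float variant of sin yields the implicit decl for sinf, or NULL_TREE
   when no such builtin is available. */
tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);
if (sinf_decl != NULL_TREE)
  /* ... build a call to sinf instead of sin ... */;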
2316 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2317 return its code, otherwise return IFN_LAST. Note that this function
2318 only tests whether the function is defined in internals.def, not whether
2319 it is actually available on the target. */
2321 internal_fn
2322 associated_internal_fn (tree fndecl)
2324 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2325 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2326 switch (DECL_FUNCTION_CODE (fndecl))
2328 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2329 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2330 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2331 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2332 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2333 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2334 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2335 #include "internal-fn.def"
2337 CASE_FLT_FN (BUILT_IN_POW10):
2338 return IFN_EXP10;
2340 CASE_FLT_FN (BUILT_IN_DREM):
2341 return IFN_REMAINDER;
2343 CASE_FLT_FN (BUILT_IN_SCALBN):
2344 CASE_FLT_FN (BUILT_IN_SCALBLN):
2345 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2346 return IFN_LDEXP;
2347 return IFN_LAST;
2349 default:
2350 return IFN_LAST;
2354 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2355 on the current target by a call to an internal function, return the
2356 code of that internal function, otherwise return IFN_LAST. The caller
2357 is responsible for ensuring that any side-effects of the built-in
2358 call are dealt with correctly. E.g. if CALL sets errno, the caller
2359 must decide that the errno result isn't needed or make it available
2360 in some other way. */
2362 internal_fn
2363 replacement_internal_fn (gcall *call)
2365 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2367 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2368 if (ifn != IFN_LAST)
2370 tree_pair types = direct_internal_fn_types (ifn, call);
2371 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2372 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2373 return ifn;
2376 return IFN_LAST;
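/* A minimal caller sketch, assuming CALL is a one-argument builtin call
   whose errno side effects have already been dealt with as required
   above; the update of the statement stream is elided. */
internal_fn ifn = replacement_internal_fn (call);
if (ifn != IFN_LAST)
  {
    gcall *repl = gimple_build_call_internal (ifn, 1,
					      gimple_call_arg (call, 0));
    gimple_call_set_lhs (repl, gimple_call_lhs (call));
    /* ... replace CALL with REPL in the statement stream ... */
  }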
2379 /* Expand a call to the builtin trinary math functions (fma).
2380 Return NULL_RTX if a normal call should be emitted rather than expanding the
2381 function in-line. EXP is the expression that is a call to the builtin
2382 function; if convenient, the result should be placed in TARGET.
2383 SUBTARGET may be used as the target for computing one of EXP's
2384 operands. */
2386 static rtx
2387 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2389 optab builtin_optab;
2390 rtx op0, op1, op2, result;
2391 rtx_insn *insns;
2392 tree fndecl = get_callee_fndecl (exp);
2393 tree arg0, arg1, arg2;
2394 machine_mode mode;
2396 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2397 return NULL_RTX;
2399 arg0 = CALL_EXPR_ARG (exp, 0);
2400 arg1 = CALL_EXPR_ARG (exp, 1);
2401 arg2 = CALL_EXPR_ARG (exp, 2);
2403 switch (DECL_FUNCTION_CODE (fndecl))
2405 CASE_FLT_FN (BUILT_IN_FMA):
2406 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2407 builtin_optab = fma_optab; break;
2408 default:
2409 gcc_unreachable ();
2412 /* Make a suitable register to place result in. */
2413 mode = TYPE_MODE (TREE_TYPE (exp));
2415 /* Before working hard, check whether the instruction is available. */
2416 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2417 return NULL_RTX;
2419 result = gen_reg_rtx (mode);
2421 /* Always stabilize the argument list. */
2422 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2423 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2424 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2426 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2427 op1 = expand_normal (arg1);
2428 op2 = expand_normal (arg2);
2430 start_sequence ();
2432 /* Compute into RESULT.
2433 Set RESULT to wherever the result comes back. */
2434 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2435 result, 0);
2437 /* If we were unable to expand via the builtin, stop the sequence
2438 (without outputting the insns) and call to the library function
2439 with the stabilized argument list. */
2440 if (result == 0)
2442 end_sequence ();
2443 return expand_call (exp, target, target == const0_rtx);
2446 /* Output the entire sequence. */
2447 insns = get_insns ();
2448 end_sequence ();
2449 emit_insn (insns);
2451 return result;
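/* A user-level sketch: with a hardware pattern behind fma_optab the
   call expands inline; otherwise the sequence is discarded and a
   normal libm call is emitted instead. */
double
fma_example (double a, double b, double c)
{
  return __builtin_fma (a, b, c);  /* a * b + c with a single rounding */
}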
2454 /* Expand a call to the builtin sin and cos math functions.
2455 Return NULL_RTX if a normal call should be emitted rather than expanding the
2456 function in-line. EXP is the expression that is a call to the builtin
2457 function; if convenient, the result should be placed in TARGET.
2458 SUBTARGET may be used as the target for computing one of EXP's
2459 operands. */
2461 static rtx
2462 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2464 optab builtin_optab;
2465 rtx op0;
2466 rtx_insn *insns;
2467 tree fndecl = get_callee_fndecl (exp);
2468 machine_mode mode;
2469 tree arg;
2471 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2472 return NULL_RTX;
2474 arg = CALL_EXPR_ARG (exp, 0);
2476 switch (DECL_FUNCTION_CODE (fndecl))
2478 CASE_FLT_FN (BUILT_IN_SIN):
2479 CASE_FLT_FN (BUILT_IN_COS):
2480 builtin_optab = sincos_optab; break;
2481 default:
2482 gcc_unreachable ();
2485 /* Make a suitable register to place result in. */
2486 mode = TYPE_MODE (TREE_TYPE (exp));
2488 /* Check if the sincos insn is available; otherwise fall back
2489 to the sin or cos insn. */
2490 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2491 switch (DECL_FUNCTION_CODE (fndecl))
2493 CASE_FLT_FN (BUILT_IN_SIN):
2494 builtin_optab = sin_optab; break;
2495 CASE_FLT_FN (BUILT_IN_COS):
2496 builtin_optab = cos_optab; break;
2497 default:
2498 gcc_unreachable ();
2501 /* Before working hard, check whether the instruction is available. */
2502 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2504 rtx result = gen_reg_rtx (mode);
2506 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2507 need to expand the argument again. This way, we will not perform
2508 side effects more than once. */
2509 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2511 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2513 start_sequence ();
2515 /* Compute into RESULT.
2516 Set RESULT to wherever the result comes back. */
2517 if (builtin_optab == sincos_optab)
2519 int ok;
2521 switch (DECL_FUNCTION_CODE (fndecl))
2523 CASE_FLT_FN (BUILT_IN_SIN):
2524 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2525 break;
2526 CASE_FLT_FN (BUILT_IN_COS):
2527 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2528 break;
2529 default:
2530 gcc_unreachable ();
2532 gcc_assert (ok);
2534 else
2535 result = expand_unop (mode, builtin_optab, op0, result, 0);
2537 if (result != 0)
2539 /* Output the entire sequence. */
2540 insns = get_insns ();
2541 end_sequence ();
2542 emit_insn (insns);
2543 return result;
2546 /* If we were unable to expand via the builtin, stop the sequence
2547 (without outputting the insns) and call to the library function
2548 with the stabilized argument list. */
2549 end_sequence ();
2552 return expand_call (exp, target, target == const0_rtx);
2555 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2556 return an RTL instruction code that implements the functionality.
2557 If that isn't possible or available return CODE_FOR_nothing. */
2559 static enum insn_code
2560 interclass_mathfn_icode (tree arg, tree fndecl)
2562 bool errno_set = false;
2563 optab builtin_optab = unknown_optab;
2564 machine_mode mode;
2566 switch (DECL_FUNCTION_CODE (fndecl))
2568 CASE_FLT_FN (BUILT_IN_ILOGB):
2569 errno_set = true; builtin_optab = ilogb_optab; break;
2570 CASE_FLT_FN (BUILT_IN_ISINF):
2571 builtin_optab = isinf_optab; break;
2572 case BUILT_IN_ISNORMAL:
2573 case BUILT_IN_ISFINITE:
2574 CASE_FLT_FN (BUILT_IN_FINITE):
2575 case BUILT_IN_FINITED32:
2576 case BUILT_IN_FINITED64:
2577 case BUILT_IN_FINITED128:
2578 case BUILT_IN_ISINFD32:
2579 case BUILT_IN_ISINFD64:
2580 case BUILT_IN_ISINFD128:
2581 /* These builtins have no optabs (yet). */
2582 break;
2583 default:
2584 gcc_unreachable ();
2587 /* There's no easy way to detect the case we need to set EDOM. */
2588 if (flag_errno_math && errno_set)
2589 return CODE_FOR_nothing;
2591 /* Optab mode depends on the mode of the input argument. */
2592 mode = TYPE_MODE (TREE_TYPE (arg));
2594 if (builtin_optab)
2595 return optab_handler (builtin_optab, mode);
2596 return CODE_FOR_nothing;
2599 /* Expand a call to one of the builtin math functions that operate on
2600 a floating-point argument and output an integer result (ilogb, isinf,
2601 isnan, etc.).
2602 Return 0 if a normal call should be emitted rather than expanding the
2603 function in-line. EXP is the expression that is a call to the builtin
2604 function; if convenient, the result should be placed in TARGET. */
2606 static rtx
2607 expand_builtin_interclass_mathfn (tree exp, rtx target)
2609 enum insn_code icode = CODE_FOR_nothing;
2610 rtx op0;
2611 tree fndecl = get_callee_fndecl (exp);
2612 machine_mode mode;
2613 tree arg;
2615 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2616 return NULL_RTX;
2618 arg = CALL_EXPR_ARG (exp, 0);
2619 icode = interclass_mathfn_icode (arg, fndecl);
2620 mode = TYPE_MODE (TREE_TYPE (arg));
2622 if (icode != CODE_FOR_nothing)
2624 class expand_operand ops[1];
2625 rtx_insn *last = get_last_insn ();
2626 tree orig_arg = arg;
2628 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2629 need to expand the argument again. This way, we will not perform
2630 side effects more than once. */
2631 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2633 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2635 if (mode != GET_MODE (op0))
2636 op0 = convert_to_mode (mode, op0, 0);
2638 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2639 if (maybe_legitimize_operands (icode, 0, 1, ops)
2640 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2641 return ops[0].value;
2643 delete_insns_since (last);
2644 CALL_EXPR_ARG (exp, 0) = orig_arg;
2647 return NULL_RTX;
2650 /* Expand a call to the builtin sincos math function.
2651 Return NULL_RTX if a normal call should be emitted rather than expanding the
2652 function in-line. EXP is the expression that is a call to the builtin
2653 function. */
2655 static rtx
2656 expand_builtin_sincos (tree exp)
2658 rtx op0, op1, op2, target1, target2;
2659 machine_mode mode;
2660 tree arg, sinp, cosp;
2661 int result;
2662 location_t loc = EXPR_LOCATION (exp);
2663 tree alias_type, alias_off;
2665 if (!validate_arglist (exp, REAL_TYPE,
2666 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2667 return NULL_RTX;
2669 arg = CALL_EXPR_ARG (exp, 0);
2670 sinp = CALL_EXPR_ARG (exp, 1);
2671 cosp = CALL_EXPR_ARG (exp, 2);
2673 /* Make a suitable register to place result in. */
2674 mode = TYPE_MODE (TREE_TYPE (arg));
2676 /* Check if sincos insn is available, otherwise emit the call. */
2677 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2678 return NULL_RTX;
2680 target1 = gen_reg_rtx (mode);
2681 target2 = gen_reg_rtx (mode);
2683 op0 = expand_normal (arg);
2684 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2685 alias_off = build_int_cst (alias_type, 0);
2686 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2687 sinp, alias_off));
2688 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2689 cosp, alias_off));
2691 /* Compute into target1 and target2.
2692 Set TARGET to wherever the result comes back. */
2693 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2694 gcc_assert (result);
2696 /* Move target1 and target2 to the memory locations indicated
2697 by op1 and op2. */
2698 emit_move_insn (op1, target1);
2699 emit_move_insn (op2, target2);
2701 return const0_rtx;
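/* A user-level sketch of what this expander handles, assuming the
   GNU sincos extension; both results come back through the pointers
   when sincos_optab has a handler for the mode. */
void
sincos_example (double x, double *sp, double *cp)
{
  __builtin_sincos (x, sp, cp);
}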
2704 /* Expand a call to the internal cexpi builtin to the sincos math function.
2705 EXP is the expression that is a call to the builtin function; if convenient,
2706 the result should be placed in TARGET. */
2708 static rtx
2709 expand_builtin_cexpi (tree exp, rtx target)
2711 tree fndecl = get_callee_fndecl (exp);
2712 tree arg, type;
2713 machine_mode mode;
2714 rtx op0, op1, op2;
2715 location_t loc = EXPR_LOCATION (exp);
2717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2718 return NULL_RTX;
2720 arg = CALL_EXPR_ARG (exp, 0);
2721 type = TREE_TYPE (arg);
2722 mode = TYPE_MODE (TREE_TYPE (arg));
2724 /* Try expanding via a sincos optab, falling back to emitting a libcall
2725 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2726 is only generated when either of them is available. */
2727 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2729 op1 = gen_reg_rtx (mode);
2730 op2 = gen_reg_rtx (mode);
2732 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2734 /* Compute into op1 and op2. */
2735 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2737 else if (targetm.libc_has_function (function_sincos))
2739 tree call, fn = NULL_TREE;
2740 tree top1, top2;
2741 rtx op1a, op2a;
2743 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2744 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2745 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2746 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2747 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2748 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2749 else
2750 gcc_unreachable ();
2752 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2753 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2754 op1a = copy_addr_to_reg (XEXP (op1, 0));
2755 op2a = copy_addr_to_reg (XEXP (op2, 0));
2756 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2757 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2759 /* Make sure not to fold the sincos call again. */
2760 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2761 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2762 call, 3, arg, top1, top2));
2764 else
2766 tree call, fn = NULL_TREE, narg;
2767 tree ctype = build_complex_type (type);
2769 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2770 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2771 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2772 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2773 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2774 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2775 else
2776 gcc_unreachable ();
2778 /* If we don't have a decl for cexp create one. This is the
2779 friendliest fallback if the user calls __builtin_cexpi
2780 without full target C99 function support. */
2781 if (fn == NULL_TREE)
2783 tree fntype;
2784 const char *name = NULL;
2786 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2787 name = "cexpf";
2788 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2789 name = "cexp";
2790 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2791 name = "cexpl";
2793 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2794 fn = build_fn_decl (name, fntype);
2797 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2798 build_real (type, dconst0), arg);
2800 /* Make sure not to fold the cexp call again. */
2801 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2802 return expand_expr (build_call_nary (ctype, call, 1, narg),
2803 target, VOIDmode, EXPAND_NORMAL);
2806 /* Now build the proper return type. */
2807 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2808 make_tree (TREE_TYPE (arg), op2),
2809 make_tree (TREE_TYPE (arg), op1)),
2810 target, VOIDmode, EXPAND_NORMAL);
2813 /* Conveniently construct a function call expression. FNDECL names the
2814 function to be called, N is the number of arguments, and the "..."
2815 parameters are the argument expressions. Unlike build_call_expr
2816 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2818 static tree
2819 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2821 va_list ap;
2822 tree fntype = TREE_TYPE (fndecl);
2823 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2825 va_start (ap, n);
2826 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2827 va_end (ap);
2828 SET_EXPR_LOCATION (fn, loc);
2829 return fn;
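/* A minimal usage sketch mirroring the fallback paths below: build an
   unfolded CALL_EXPR to FNDECL and expand it directly (LOC, FNDECL and
   ARG stand for values in the caller's hands). */
tree call = build_call_nofold_loc (loc, fndecl, 1, arg);
rtx val = expand_normal (call);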
2832 /* Expand a call to one of the builtin rounding functions gcc defines
2833 as an extension (lfloor and lceil). As these are gcc extensions we
2834 do not need to worry about setting errno to EDOM.
2835 If expanding via optab fails, lower expression to (int)(floor(x)).
2836 EXP is the expression that is a call to the builtin function;
2837 if convenient, the result should be placed in TARGET. */
2839 static rtx
2840 expand_builtin_int_roundingfn (tree exp, rtx target)
2842 convert_optab builtin_optab;
2843 rtx op0, tmp;
2844 rtx_insn *insns;
2845 tree fndecl = get_callee_fndecl (exp);
2846 enum built_in_function fallback_fn;
2847 tree fallback_fndecl;
2848 machine_mode mode;
2849 tree arg;
2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 return NULL_RTX;
2854 arg = CALL_EXPR_ARG (exp, 0);
2856 switch (DECL_FUNCTION_CODE (fndecl))
2858 CASE_FLT_FN (BUILT_IN_ICEIL):
2859 CASE_FLT_FN (BUILT_IN_LCEIL):
2860 CASE_FLT_FN (BUILT_IN_LLCEIL):
2861 builtin_optab = lceil_optab;
2862 fallback_fn = BUILT_IN_CEIL;
2863 break;
2865 CASE_FLT_FN (BUILT_IN_IFLOOR):
2866 CASE_FLT_FN (BUILT_IN_LFLOOR):
2867 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2868 builtin_optab = lfloor_optab;
2869 fallback_fn = BUILT_IN_FLOOR;
2870 break;
2872 default:
2873 gcc_unreachable ();
2876 /* Make a suitable register to place result in. */
2877 mode = TYPE_MODE (TREE_TYPE (exp));
2879 target = gen_reg_rtx (mode);
2881 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2882 need to expand the argument again. This way, we will not perform
2883 side effects more than once. */
2884 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2886 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2888 start_sequence ();
2890 /* Compute into TARGET. */
2891 if (expand_sfix_optab (target, op0, builtin_optab))
2893 /* Output the entire sequence. */
2894 insns = get_insns ();
2895 end_sequence ();
2896 emit_insn (insns);
2897 return target;
2900 /* If we were unable to expand via the builtin, stop the sequence
2901 (without outputting the insns). */
2902 end_sequence ();
2904 /* Fall back to floating point rounding optab. */
2905 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2907 /* For non-C99 targets we may end up without a fallback fndecl here
2908 if the user called __builtin_lfloor directly. In this case emit
2909 a call to the floor/ceil variants nevertheless. This should result
2910 in the best user experience for targets without full C99 support. */
2911 if (fallback_fndecl == NULL_TREE)
2913 tree fntype;
2914 const char *name = NULL;
2916 switch (DECL_FUNCTION_CODE (fndecl))
2918 case BUILT_IN_ICEIL:
2919 case BUILT_IN_LCEIL:
2920 case BUILT_IN_LLCEIL:
2921 name = "ceil";
2922 break;
2923 case BUILT_IN_ICEILF:
2924 case BUILT_IN_LCEILF:
2925 case BUILT_IN_LLCEILF:
2926 name = "ceilf";
2927 break;
2928 case BUILT_IN_ICEILL:
2929 case BUILT_IN_LCEILL:
2930 case BUILT_IN_LLCEILL:
2931 name = "ceill";
2932 break;
2933 case BUILT_IN_IFLOOR:
2934 case BUILT_IN_LFLOOR:
2935 case BUILT_IN_LLFLOOR:
2936 name = "floor";
2937 break;
2938 case BUILT_IN_IFLOORF:
2939 case BUILT_IN_LFLOORF:
2940 case BUILT_IN_LLFLOORF:
2941 name = "floorf";
2942 break;
2943 case BUILT_IN_IFLOORL:
2944 case BUILT_IN_LFLOORL:
2945 case BUILT_IN_LLFLOORL:
2946 name = "floorl";
2947 break;
2948 default:
2949 gcc_unreachable ();
2952 fntype = build_function_type_list (TREE_TYPE (arg),
2953 TREE_TYPE (arg), NULL_TREE);
2954 fallback_fndecl = build_fn_decl (name, fntype);
2957 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2959 tmp = expand_normal (exp);
2960 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2962 /* Truncate the result of floating point optab to integer
2963 via expand_fix (). */
2964 target = gen_reg_rtx (mode);
2965 expand_fix (target, tmp, 0);
2967 return target;
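/* A user-level sketch of the lowering above: when lceil_optab or
   lfloor_optab has no handler for the mode, the call is rewritten as a
   floor/ceil call followed by a float-to-integer conversion. */
long
lfloor_example (double x)
{
  return __builtin_lfloor (x);  /* roughly (long) floor (x) as fallback */
}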
2970 /* Expand a call to one of the builtin math functions doing integer
2971 conversion (lrint).
2972 Return 0 if a normal call should be emitted rather than expanding the
2973 function in-line. EXP is the expression that is a call to the builtin
2974 function; if convenient, the result should be placed in TARGET. */
2976 static rtx
2977 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2979 convert_optab builtin_optab;
2980 rtx op0;
2981 rtx_insn *insns;
2982 tree fndecl = get_callee_fndecl (exp);
2983 tree arg;
2984 machine_mode mode;
2985 enum built_in_function fallback_fn = BUILT_IN_NONE;
2987 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2988 return NULL_RTX;
2990 arg = CALL_EXPR_ARG (exp, 0);
2992 switch (DECL_FUNCTION_CODE (fndecl))
2994 CASE_FLT_FN (BUILT_IN_IRINT):
2995 fallback_fn = BUILT_IN_LRINT;
2996 gcc_fallthrough ();
2997 CASE_FLT_FN (BUILT_IN_LRINT):
2998 CASE_FLT_FN (BUILT_IN_LLRINT):
2999 builtin_optab = lrint_optab;
3000 break;
3002 CASE_FLT_FN (BUILT_IN_IROUND):
3003 fallback_fn = BUILT_IN_LROUND;
3004 gcc_fallthrough ();
3005 CASE_FLT_FN (BUILT_IN_LROUND):
3006 CASE_FLT_FN (BUILT_IN_LLROUND):
3007 builtin_optab = lround_optab;
3008 break;
3010 default:
3011 gcc_unreachable ();
3014 /* There's no easy way to detect the case we need to set EDOM. */
3015 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3016 return NULL_RTX;
3018 /* Make a suitable register to place result in. */
3019 mode = TYPE_MODE (TREE_TYPE (exp));
3021 /* There's no easy way to detect the case we need to set EDOM. */
3022 if (!flag_errno_math)
3024 rtx result = gen_reg_rtx (mode);
3026 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3027 need to expand the argument again. This way, we will not perform
3028 side effects more than once. */
3029 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3031 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3033 start_sequence ();
3035 if (expand_sfix_optab (result, op0, builtin_optab))
3037 /* Output the entire sequence. */
3038 insns = get_insns ();
3039 end_sequence ();
3040 emit_insn (insns);
3041 return result;
3044 /* If we were unable to expand via the builtin, stop the sequence
3045 (without outputting the insns) and call to the library function
3046 with the stabilized argument list. */
3047 end_sequence ();
3050 if (fallback_fn != BUILT_IN_NONE)
3052 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
3053 targets, (int) round (x) should never be transformed into
3054 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
3055 a call to lround in the hope that the target provides at least some
3056 C99 functions. This should result in the best user experience for
3057 targets without full C99 support. */
3058 tree fallback_fndecl = mathfn_built_in_1
3059 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3061 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3062 fallback_fndecl, 1, arg);
3064 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3065 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3066 return convert_to_mode (mode, target, 0);
3069 return expand_call (exp, target, target == const0_rtx);
3072 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3073 a normal call should be emitted rather than expanding the function
3074 in-line. EXP is the expression that is a call to the builtin
3075 function; if convenient, the result should be placed in TARGET. */
3077 static rtx
3078 expand_builtin_powi (tree exp, rtx target)
3080 tree arg0, arg1;
3081 rtx op0, op1;
3082 machine_mode mode;
3083 machine_mode mode2;
3085 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3086 return NULL_RTX;
3088 arg0 = CALL_EXPR_ARG (exp, 0);
3089 arg1 = CALL_EXPR_ARG (exp, 1);
3090 mode = TYPE_MODE (TREE_TYPE (exp));
3092 /* Emit a libcall to libgcc. */
3094 /* Mode of the 2nd argument must match that of an int. */
3095 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3097 if (target == NULL_RTX)
3098 target = gen_reg_rtx (mode);
3100 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3101 if (GET_MODE (op0) != mode)
3102 op0 = convert_to_mode (mode, op0, 0);
3103 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3104 if (GET_MODE (op1) != mode2)
3105 op1 = convert_to_mode (mode2, op1, 0);
3107 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3108 target, LCT_CONST, mode,
3109 op0, mode, op1, mode2);
3111 return target;
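/* A user-level sketch: the expansion always emits the powi_optab
   libcall, e.g. a libgcc routine such as __powidf2 for DFmode (the
   routine name is shown for illustration only). */
double
powi_example (double x)
{
  return __builtin_powi (x, 5);  /* second operand has int mode */
}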
3114 /* Expand expression EXP which is a call to the strlen builtin. Return
3115 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3116 try to get the result in TARGET, if convenient. */
3118 static rtx
3119 expand_builtin_strlen (tree exp, rtx target,
3120 machine_mode target_mode)
3122 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3123 return NULL_RTX;
3125 tree src = CALL_EXPR_ARG (exp, 0);
3126 if (!check_read_access (exp, src))
3127 return NULL_RTX;
3129 /* If the length can be computed at compile-time, return it. */
3130 if (tree len = c_strlen (src, 0))
3131 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 /* If the length can be computed at compile-time and is constant
3134 integer, but there are side-effects in src, evaluate
3135 src for side-effects, then return len.
3136 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3137 can be optimized into: i++; x = 3; */
3138 tree len = c_strlen (src, 1);
3139 if (len && TREE_CODE (len) == INTEGER_CST)
3141 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3147 /* If SRC is not a pointer type, don't do this operation inline. */
3148 if (align == 0)
3149 return NULL_RTX;
3151 /* Bail out if we can't compute strlen in the right mode. */
3152 machine_mode insn_mode;
3153 enum insn_code icode = CODE_FOR_nothing;
3154 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3156 icode = optab_handler (strlen_optab, insn_mode);
3157 if (icode != CODE_FOR_nothing)
3158 break;
3160 if (insn_mode == VOIDmode)
3161 return NULL_RTX;
3163 /* Make a place to hold the source address. We will not expand
3164 the actual source until we are sure that the expansion will
3165 not fail -- there are trees that cannot be expanded twice. */
3166 rtx src_reg = gen_reg_rtx (Pmode);
3168 /* Mark the beginning of the strlen sequence so we can emit the
3169 source operand later. */
3170 rtx_insn *before_strlen = get_last_insn ();
3172 class expand_operand ops[4];
3173 create_output_operand (&ops[0], target, insn_mode);
3174 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3175 create_integer_operand (&ops[2], 0);
3176 create_integer_operand (&ops[3], align);
3177 if (!maybe_expand_insn (icode, 4, ops))
3178 return NULL_RTX;
3180 /* Check to see if the argument was declared attribute nonstring
3181 and if so, issue a warning since at this point it's not known
3182 to be nul-terminated. */
3183 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3185 /* Now that we are assured of success, expand the source. */
3186 start_sequence ();
3187 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3188 if (pat != src_reg)
3190 #ifdef POINTERS_EXTEND_UNSIGNED
3191 if (GET_MODE (pat) != Pmode)
3192 pat = convert_to_mode (Pmode, pat,
3193 POINTERS_EXTEND_UNSIGNED);
3194 #endif
3195 emit_move_insn (src_reg, pat);
3197 pat = get_insns ();
3198 end_sequence ();
3200 if (before_strlen)
3201 emit_insn_after (pat, before_strlen);
3202 else
3203 emit_insn_before (pat, get_insns ());
3205 /* Return the value in the proper mode for this function. */
3206 if (GET_MODE (ops[0].value) == target_mode)
3207 target = ops[0].value;
3208 else if (target != 0)
3209 convert_move (target, ops[0].value, 0);
3210 else
3211 target = convert_to_mode (target_mode, ops[0].value, 0);
3213 return target;
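/* A user-level sketch of the two compile-time paths above: a literal
   folds outright, and a conditional of known-length literals keeps only
   its side effects, exactly as the comment in the expander describes. */
void
strlen_examples (int i)
{
  unsigned long n1 = __builtin_strlen ("hello");                  /* 5 */
  unsigned long n2 = __builtin_strlen (i++ ? "xfoo" + 1 : "bar"); /* i++; 3 */
}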
3216 /* Expand call EXP to the strnlen built-in, returning the result
3217 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3219 static rtx
3220 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3222 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3223 return NULL_RTX;
3225 tree src = CALL_EXPR_ARG (exp, 0);
3226 tree bound = CALL_EXPR_ARG (exp, 1);
3228 if (!bound)
3229 return NULL_RTX;
3231 check_read_access (exp, src, bound);
3233 location_t loc = UNKNOWN_LOCATION;
3234 if (EXPR_HAS_LOCATION (exp))
3235 loc = EXPR_LOCATION (exp);
3237 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3238 so these conversions aren't necessary. */
3239 c_strlen_data lendata = { };
3240 tree len = c_strlen (src, 0, &lendata, 1);
3241 if (len)
3242 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3244 if (TREE_CODE (bound) == INTEGER_CST)
3246 if (!len)
3247 return NULL_RTX;
3249 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3250 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3253 if (TREE_CODE (bound) != SSA_NAME)
3254 return NULL_RTX;
3256 wide_int min, max;
3257 enum value_range_kind rng = get_range_info (bound, &min, &max);
3258 if (rng != VR_RANGE)
3259 return NULL_RTX;
3261 if (!len || TREE_CODE (len) != INTEGER_CST)
3263 bool exact;
3264 lendata.decl = unterminated_array (src, &len, &exact);
3265 if (!lendata.decl)
3266 return NULL_RTX;
3269 if (lendata.decl)
3270 return NULL_RTX;
3272 if (wi::gtu_p (min, wi::to_wide (len)))
3273 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3275 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3276 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3279 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3280 bytes from the buffer at DATA + OFFSET and return them reinterpreted
3281 as a target constant. */
3283 static rtx
3284 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3285 scalar_int_mode mode)
3287 /* The REPresentation pointed to by DATA need not be a nul-terminated
3288 string but the caller guarantees it's large enough for MODE. */
3289 const char *rep = (const char *) data;
3291 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3294 /* LEN specifies the length of the block in a memcpy/memset operation.
3295 Figure out its range and store it into MIN_SIZE/MAX_SIZE.
3296 In some cases we can make a very likely guess about the maximum size,
3297 which we then store into PROBABLE_MAX_SIZE. */
3299 static void
3300 determine_block_size (tree len, rtx len_rtx,
3301 unsigned HOST_WIDE_INT *min_size,
3302 unsigned HOST_WIDE_INT *max_size,
3303 unsigned HOST_WIDE_INT *probable_max_size)
3305 if (CONST_INT_P (len_rtx))
3307 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3308 return;
3310 else
3312 wide_int min, max;
3313 enum value_range_kind range_type = VR_UNDEFINED;
3315 /* Determine bounds from the type. */
3316 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3317 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3318 else
3319 *min_size = 0;
3320 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3321 *probable_max_size = *max_size
3322 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3323 else
3324 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3326 if (TREE_CODE (len) == SSA_NAME)
3327 range_type = get_range_info (len, &min, &max);
3328 if (range_type == VR_RANGE)
3330 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3331 *min_size = min.to_uhwi ();
3332 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3333 *probable_max_size = *max_size = max.to_uhwi ();
3335 else if (range_type == VR_ANTI_RANGE)
3337 /* Code like
3339 int n;
3340 if (n < 100)
3341 memcpy (a, b, n)
3343 Produce an anti-range allowing negative values of N. We can still
3344 use the information and guess that N is not negative. */
3346 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3347 *probable_max_size = min.to_uhwi () - 1;
3350 gcc_checking_assert (*max_size <=
3351 (unsigned HOST_WIDE_INT)
3352 GET_MODE_MASK (GET_MODE (len_rtx)));
3355 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3356 accessing an object of SIZE bytes. */
3358 static bool
3359 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3360 tree bndrng[2], tree size, const access_data *pad = NULL)
3362 if (!bndrng[0] || TREE_NO_WARNING (exp))
3363 return false;
3365 tree maxobjsize = max_object_size ();
3367 bool warned = false;
3369 if (opt == OPT_Wstringop_overread)
3371 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3373 if (bndrng[0] == bndrng[1])
3374 warned = (func
3375 ? warning_at (loc, opt,
3376 "%K%qD specified bound %E "
3377 "exceeds maximum object size %E",
3378 exp, func, bndrng[0], maxobjsize)
3379 : warning_at (loc, opt,
3380 "%Kspecified bound %E "
3381 "exceeds maximum object size %E",
3382 exp, bndrng[0], maxobjsize));
3383 else
3384 warned = (func
3385 ? warning_at (loc, opt,
3386 "%K%qD specified bound [%E, %E] "
3387 "exceeds maximum object size %E",
3388 exp, func,
3389 bndrng[0], bndrng[1], maxobjsize)
3390 : warning_at (loc, opt,
3391 "%Kspecified bound [%E, %E] "
3392 "exceeds maximum object size %E",
3393 exp, bndrng[0], bndrng[1], maxobjsize));
3395 else if (!size || tree_int_cst_le (bndrng[0], size))
3396 return false;
3397 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3398 warned = (func
3399 ? warning_at (loc, opt,
3400 "%K%qD specified bound %E exceeds "
3401 "source size %E",
3402 exp, func, bndrng[0], size)
3403 : warning_at (loc, opt,
3404 "%Kspecified bound %E exceeds "
3405 "source size %E",
3406 exp, bndrng[0], size));
3407 else
3408 warned = (func
3409 ? warning_at (loc, opt,
3410 "%K%qD specified bound [%E, %E] exceeds "
3411 "source size %E",
3412 exp, func, bndrng[0], bndrng[1], size)
3413 : warning_at (loc, opt,
3414 "%Kspecified bound [%E, %E] exceeds "
3415 "source size %E",
3416 exp, bndrng[0], bndrng[1], size));
3417 if (warned)
3419 if (pad && pad->src.ref)
3421 if (DECL_P (pad->src.ref))
3422 inform (DECL_SOURCE_LOCATION (pad->src.ref),
3423 "source object declared here");
3424 else if (EXPR_HAS_LOCATION (pad->src.ref))
3425 inform (EXPR_LOCATION (pad->src.ref),
3426 "source object allocated here");
3428 TREE_NO_WARNING (exp) = true;
3431 return warned;
3434 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3436 if (bndrng[0] == bndrng[1])
3437 warned = (func
3438 ? warning_at (loc, opt,
3439 "%K%qD specified size %E "
3440 "exceeds maximum object size %E",
3441 exp, func, bndrng[0], maxobjsize)
3442 : warning_at (loc, opt,
3443 "%Kspecified size %E "
3444 "exceeds maximum object size %E",
3445 exp, bndrng[0], maxobjsize));
3446 else
3447 warned = (func
3448 ? warning_at (loc, opt,
3449 "%K%qD specified size between %E and %E "
3450 "exceeds maximum object size %E",
3451 exp, func,
3452 bndrng[0], bndrng[1], maxobjsize)
3453 : warning_at (loc, opt,
3454 "%Kspecified size between %E and %E "
3455 "exceeds maximum object size %E",
3456 exp, bndrng[0], bndrng[1], maxobjsize));
3458 else if (!size || tree_int_cst_le (bndrng[0], size))
3459 return false;
3460 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3461 warned = (func
3462 ? warning_at (loc, OPT_Wstringop_overflow_,
3463 "%K%qD specified bound %E exceeds "
3464 "destination size %E",
3465 exp, func, bndrng[0], size)
3466 : warning_at (loc, OPT_Wstringop_overflow_,
3467 "%Kspecified bound %E exceeds "
3468 "destination size %E",
3469 exp, bndrng[0], size));
3470 else
3471 warned = (func
3472 ? warning_at (loc, OPT_Wstringop_overflow_,
3473 "%K%qD specified bound [%E, %E] exceeds "
3474 "destination size %E",
3475 exp, func, bndrng[0], bndrng[1], size)
3476 : warning_at (loc, OPT_Wstringop_overflow_,
3477 "%Kspecified bound [%E, %E] exceeds "
3478 "destination size %E",
3479 exp, bndrng[0], bndrng[1], size));
3481 if (warned)
3483 if (pad && pad->dst.ref)
3484 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
3485 "destination object declared here");
3486 TREE_NO_WARNING (exp) = true;
3489 return warned;
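/* Illustrative sketch (not part of GCC): assuming -Wstringop-overread,
   a call with a bound in excess of the source object, such as

     char a[4];
     memchr (a, 'x', 8);

   reaches maybe_warn_for_bound with BNDRNG [8, 8] and SIZE 4 and is
   diagnosed as "specified bound 8 exceeds source size 4".  */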
3492 /* For an expression EXP issue an access warning controlled by option OPT
3493 about reading a number of bytes in RANGE from a region SLEN bytes in size.
3493 ACCESS is true for calls that read from the region and false for calls
3493 that only expect a region of that size. Returns true when a warning
3493 has been issued. */
3495 static bool
3496 warn_for_access (location_t loc, tree func, tree exp, tree range[2],
3497 tree slen, bool access)
3499 bool warned = false;
3501 if (access)
3503 if (tree_int_cst_equal (range[0], range[1]))
3504 warned = (func
3505 ? warning_n (loc, OPT_Wstringop_overread,
3506 tree_to_uhwi (range[0]),
3507 "%K%qD reading %E byte from a region of size %E",
3508 "%K%qD reading %E bytes from a region of size %E",
3509 exp, func, range[0], slen)
3510 : warning_n (loc, OPT_Wstringop_overread,
3511 tree_to_uhwi (range[0]),
3512 "%Kreading %E byte from a region of size %E",
3513 "%Kreading %E bytes from a region of size %E",
3514 exp, range[0], slen));
3515 else if (tree_int_cst_sign_bit (range[1]))
3517 /* Avoid printing the upper bound if it's invalid. */
3518 warned = (func
3519 ? warning_at (loc, OPT_Wstringop_overread,
3520 "%K%qD reading %E or more bytes from a region "
3521 "of size %E",
3522 exp, func, range[0], slen)
3523 : warning_at (loc, OPT_Wstringop_overread,
3524 "%Kreading %E or more bytes from a region "
3525 "of size %E",
3526 exp, range[0], slen));
3528 else
3529 warned = (func
3530 ? warning_at (loc, OPT_Wstringop_overread,
3531 "%K%qD reading between %E and %E bytes from "
3532 "a region of size %E",
3533 exp, func, range[0], range[1], slen)
3534 : warning_at (loc, OPT_Wstringop_overread,
3535 "%Kreading between %E and %E bytes from "
3536 "a region of size %E",
3537 exp, range[0], range[1], slen));
3539 if (warned)
3540 TREE_NO_WARNING (exp) = true;
3542 return warned;
3545 if (tree_int_cst_equal (range[0], range[1]))
3546 warned = (func
3547 ? warning_n (loc, OPT_Wstringop_overread,
3548 tree_to_uhwi (range[0]),
3549 "%K%qD epecting %E byte in a region of size %E",
3550 "%K%qD expecting %E bytes in a region of size %E",
3551 exp, func, range[0], slen)
3552 : warning_n (loc, OPT_Wstringop_overread,
3553 tree_to_uhwi (range[0]),
3554 "%Kexpecting %E byte in a region of size %E",
3555 "%Kexpecting %E bytes in a region of size %E",
3556 exp, range[0], slen));
3557 else if (tree_int_cst_sign_bit (range[1]))
3559 /* Avoid printing the upper bound if it's invalid. */
3560 warned = (func
3561 ? warning_at (loc, OPT_Wstringop_overread,
3562 "%K%qD expecting %E or more bytes in a region "
3563 "of size %E",
3564 exp, func, range[0], slen)
3565 : warning_at (loc, OPT_Wstringop_overread,
3566 "%Kexpecting %E or more bytes in a region "
3567 "of size %E",
3568 exp, range[0], slen));
3570 else
3571 warned = (func
3572 ? warning_at (loc, OPT_Wstringop_overread,
3573 "%K%qD expecting between %E and %E bytes in "
3574 "a region of size %E",
3575 exp, func, range[0], range[1], slen)
3576 : warning_at (loc, OPT_Wstringop_overread,
3577 "%Kexpectting between %E and %E bytes in "
3578 "a region of size %E",
3579 exp, range[0], range[1], slen));
3581 if (warned)
3582 TREE_NO_WARNING (exp) = true;
3584 return warned;
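/* Illustrative sketch (not part of GCC): assuming -Wstringop-overread,
   reading past the end of the source as in

     char d[8], a[4];
     memcpy (d, a, 8);

   ends up in warn_for_access with RANGE [8, 8] and SLEN 4, yielding
   "reading 8 bytes from a region of size 4".  */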
3587 /* Issue an inform message describing the target of an access REF.
3588 MODE says whether the access is a read, a write, or both. */
3590 static void
3591 inform_access (const access_ref &ref, access_mode mode)
3593 if (!ref.ref)
3594 return;
3596 /* Convert offset range and avoid including a zero range since it isn't
3597 necessarily meaningful. */
3598 long long minoff = 0, maxoff = 0;
3599 if (wi::fits_shwi_p (ref.offrng[0])
3600 && wi::fits_shwi_p (ref.offrng[1]))
3602 minoff = ref.offrng[0].to_shwi ();
3603 maxoff = ref.offrng[1].to_shwi ();
3606 /* Convert size range and always include it since all sizes are
3607 meaningful. */
3608 unsigned long long minsize = 0, maxsize = 0;
3609 if (wi::fits_shwi_p (ref.sizrng[0])
3610 && wi::fits_shwi_p (ref.sizrng[1]))
3612 minsize = ref.sizrng[0].to_shwi ();
3613 maxsize = ref.sizrng[1].to_shwi ();
3616 char sizestr[80];
3617 location_t loc;
3618 tree allocfn = NULL_TREE;
3619 if (TREE_CODE (ref.ref) == SSA_NAME)
3621 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3622 gcc_assert (is_gimple_call (stmt));
3623 loc = gimple_location (stmt);
3624 allocfn = gimple_call_fndecl (stmt);
3625 if (!allocfn)
3626 /* Handle calls through pointers to functions. */
3627 allocfn = gimple_call_fn (stmt);
3629 /* SIZRNG doesn't necessarily have the same range as the allocation
3630 size determined by gimple_call_alloc_size (). */
3632 if (minsize == maxsize)
3633 sprintf (sizestr, "%llu", minsize);
3634 else
3635 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3638 else
3639 loc = DECL_SOURCE_LOCATION (ref.ref);
3641 if (mode == access_read_write || mode == access_write_only)
3643 if (DECL_P (ref.ref))
3645 if (minoff == maxoff)
3647 if (minoff == 0)
3648 inform (loc, "destination object %qD", ref.ref);
3649 else
3650 inform (loc, "at offset %lli into destination object %qD",
3651 minoff, ref.ref);
3653 else
3654 inform (loc, "at offset [%lli, %lli] into destination object %qD",
3655 minoff, maxoff, ref.ref);
3656 return;
3659 if (minoff == maxoff)
3661 if (minoff == 0)
3662 inform (loc, "destination object of size %s allocated by %qE",
3663 sizestr, allocfn);
3664 else
3665 inform (loc,
3666 "at offset %lli into destination object of size %s "
3667 "allocated by %qE", minoff, sizestr, allocfn);
3669 else
3670 inform (loc,
3671 "at offset [%lli, %lli] into destination object of size %s "
3672 "allocated by %qE",
3673 minoff, maxoff, sizestr, allocfn);
3675 return;
3678 if (DECL_P (ref.ref))
3680 if (minoff == maxoff)
3682 if (minoff == 0)
3683 inform (loc, "source object %qD", ref.ref);
3684 else
3685 inform (loc, "at offset %lli into source object %qD",
3686 minoff, ref.ref);
3688 else
3689 inform (loc, "at offset [%lli, %lli] into source object %qD",
3690 minoff, maxoff, ref.ref);
3691 return;
3694 if (minoff == maxoff)
3696 if (minoff == 0)
3697 inform (loc, "source object of size %s allocated by %qE",
3698 sizestr, allocfn);
3699 else
3700 inform (loc,
3701 "at offset %lli into source object of size %s "
3702 "allocated by %qE", minoff, sizestr, allocfn);
3704 else
3705 inform (loc,
3706 "at offset [%lli, %lli] into source object of size %s "
3707 "allocated by %qE",
3708 minoff, maxoff, sizestr, allocfn);
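/* Illustrative sketch (not part of GCC): the notes emitted above point
   at the object an invalid access refers to, e.g. for

     char buf[4];
     memset (buf, 0, 8);

   the overflow warning is followed by "note: destination object 'buf'"
   at the declaration of BUF, with the offset included when nonzero.  */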
3711 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounding
3712 it by BNDRNG when that is nonnull and valid. */
3714 static void
3715 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
3717 if (bound)
3718 get_size_range (bound, range);
3720 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
3721 return;
3723 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
3725 offset_int r[] =
3726 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
3727 if (r[0] < bndrng[0])
3728 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
3729 if (bndrng[1] < r[1])
3730 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
3732 else
3734 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
3735 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
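/* Illustrative sketch (not part of GCC): the helper above effectively
   intersects the value range of BOUND with BNDRNG.  For example, if
   BOUND's range is [0, 100] and BNDRNG is [8, 32], RANGE is narrowed
   to [8, 32]; when BNDRNG holds the unset pair (0 and SIZE_MAX),
   RANGE is left as computed from BOUND alone.  */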
3739 /* Try to verify that the sizes and lengths of the arguments to a string
3740 manipulation function given by EXP are within valid bounds and that
3741 the operation does not lead to buffer overflow or read past the end.
3742 Arguments other than EXP may be null. When non-null, the arguments
3743 have the following meaning:
3744 DST is the destination of a copy call or NULL otherwise.
3745 SRC is the source of a copy call or NULL otherwise.
3746 DSTWRITE is the number of bytes written into the destination obtained
3747 from the user-supplied size argument to the function (such as in
3748 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3749 MAXREAD is the user-supplied bound on the length of the source sequence
3750 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3751 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3752 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3753 expression EXP is a string function call (as opposed to a memory call
3754 like memcpy). As an exception, SRCSTR can also be an integer denoting
3755 the precomputed size of the source string or object (for functions like
3756 memcpy).
3757 DSTSIZE is the size of the destination object.
3759 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3760 SIZE_MAX.
3762 MODE is the kind of access (read, write, or both), or none for simple
3763 size checks in calls to functions that neither read from nor write to the region.
3765 When nonnull, PAD points to a more detailed description of the access.
3767 If the call is successfully verified as safe return true, otherwise
3768 return false. */
3770 bool
3771 check_access (tree exp, tree dstwrite,
3772 tree maxread, tree srcstr, tree dstsize,
3773 access_mode mode, const access_data *pad /* = NULL */)
3775 /* The size of the largest object is half the address space, or
3776 PTRDIFF_MAX. (This is way too permissive.) */
3777 tree maxobjsize = max_object_size ();
3779 /* Either an approximate (minimum) length of the source string for
3780 string functions or the size of the source object for raw memory
3781 functions. */
3782 tree slen = NULL_TREE;
3784 /* The range of the access in bytes; first set to the write access
3785 for functions that write and then read for those that also (or
3786 just) read. */
3787 tree range[2] = { NULL_TREE, NULL_TREE };
3789 /* Set to true when the exact number of bytes written by a string
3790 function like strcpy is not known and the only thing that is
3791 known is that it must be at least one (for the terminating nul). */
3792 bool at_least_one = false;
3793 if (srcstr)
3795 /* SRCSTR is normally a pointer to a string but as a special case
3796 it can be an integer denoting the length of a string. */
3797 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3799 if (!check_nul_terminated_array (exp, srcstr, maxread))
3800 return false;
3801 /* Try to determine the range of lengths the source string
3802 refers to. If it can be determined and is less than
3803 the upper bound given by MAXREAD add one to it for
3804 the terminating nul. Otherwise, set it to one for
3805 the same reason, or to MAXREAD as appropriate. */
3806 c_strlen_data lendata = { };
3807 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3808 range[0] = lendata.minlen;
3809 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3810 if (range[0]
3811 && TREE_CODE (range[0]) == INTEGER_CST
3812 && TREE_CODE (range[1]) == INTEGER_CST
3813 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3815 if (maxread && tree_int_cst_le (maxread, range[0]))
3816 range[0] = range[1] = maxread;
3817 else
3818 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3819 range[0], size_one_node);
3821 if (maxread && tree_int_cst_le (maxread, range[1]))
3822 range[1] = maxread;
3823 else if (!integer_all_onesp (range[1]))
3824 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3825 range[1], size_one_node);
3827 slen = range[0];
3829 else
3831 at_least_one = true;
3832 slen = size_one_node;
3835 else
3836 slen = srcstr;
3839 if (!dstwrite && !maxread)
3841 /* When the only available piece of data is the object size
3842 there is nothing to do. */
3843 if (!slen)
3844 return true;
3846 /* Otherwise, when the length of the source sequence is known
3847 (as with strlen), set DSTWRITE to it. */
3848 if (!range[0])
3849 dstwrite = slen;
3852 if (!dstsize)
3853 dstsize = maxobjsize;
3855 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
3856 if valid. */
3857 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
3859 tree func = get_callee_fndecl (exp);
3861 /* First check the number of bytes to be written against the maximum
3862 object size. */
3863 if (range[0]
3864 && TREE_CODE (range[0]) == INTEGER_CST
3865 && tree_int_cst_lt (maxobjsize, range[0]))
3867 location_t loc = tree_nonartificial_location (exp);
3868 loc = expansion_point_location_if_in_system_header (loc);
3870 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
3871 NULL_TREE, pad);
3872 return false;
3875 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3876 constant, and in range of unsigned HOST_WIDE_INT. */
3877 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3879 /* Next check the number of bytes to be written against the destination
3880 object size. */
3881 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3883 if (range[0]
3884 && TREE_CODE (range[0]) == INTEGER_CST
3885 && ((tree_fits_uhwi_p (dstsize)
3886 && tree_int_cst_lt (dstsize, range[0]))
3887 || (dstwrite
3888 && tree_fits_uhwi_p (dstwrite)
3889 && tree_int_cst_lt (dstwrite, range[0]))))
3891 if (TREE_NO_WARNING (exp)
3892 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
3893 return false;
3895 location_t loc = tree_nonartificial_location (exp);
3896 loc = expansion_point_location_if_in_system_header (loc);
3898 bool warned = false;
3899 if (dstwrite == slen && at_least_one)
3901 /* This is a call to strcpy with a destination of 0 size
3902 and a source of unknown length. The call will write
3903 at least one byte past the end of the destination. */
3904 warned = (func
3905 ? warning_at (loc, OPT_Wstringop_overflow_,
3906 "%K%qD writing %E or more bytes into "
3907 "a region of size %E overflows "
3908 "the destination",
3909 exp, func, range[0], dstsize)
3910 : warning_at (loc, OPT_Wstringop_overflow_,
3911 "%Kwriting %E or more bytes into "
3912 "a region of size %E overflows "
3913 "the destination",
3914 exp, range[0], dstsize));
3916 else if (tree_int_cst_equal (range[0], range[1]))
3917 warned = (func
3918 ? warning_n (loc, OPT_Wstringop_overflow_,
3919 tree_to_uhwi (range[0]),
3920 "%K%qD writing %E byte into a region "
3921 "of size %E overflows the destination",
3922 "%K%qD writing %E bytes into a region "
3923 "of size %E overflows the destination",
3924 exp, func, range[0], dstsize)
3925 : warning_n (loc, OPT_Wstringop_overflow_,
3926 tree_to_uhwi (range[0]),
3927 "%Kwriting %E byte into a region "
3928 "of size %E overflows the destination",
3929 "%Kwriting %E bytes into a region "
3930 "of size %E overflows the destination",
3931 exp, range[0], dstsize));
3932 else if (tree_int_cst_sign_bit (range[1]))
3934 /* Avoid printing the upper bound if it's invalid. */
3935 warned = (func
3936 ? warning_at (loc, OPT_Wstringop_overflow_,
3937 "%K%qD writing %E or more bytes into "
3938 "a region of size %E overflows "
3939 "the destination",
3940 exp, func, range[0], dstsize)
3941 : warning_at (loc, OPT_Wstringop_overflow_,
3942 "%Kwriting %E or more bytes into "
3943 "a region of size %E overflows "
3944 "the destination",
3945 exp, range[0], dstsize));
3947 else
3948 warned = (func
3949 ? warning_at (loc, OPT_Wstringop_overflow_,
3950 "%K%qD writing between %E and %E bytes "
3951 "into a region of size %E overflows "
3952 "the destination",
3953 exp, func, range[0], range[1],
3954 dstsize)
3955 : warning_at (loc, OPT_Wstringop_overflow_,
3956 "%Kwriting between %E and %E bytes "
3957 "into a region of size %E overflows "
3958 "the destination",
3959 exp, range[0], range[1],
3960 dstsize));
3961 if (warned)
3963 TREE_NO_WARNING (exp) = true;
3964 if (pad)
3965 inform_access (pad->dst, pad->mode);
3968 /* Return error when an overflow has been detected. */
3969 return false;
3973 /* Check the maximum length of the source sequence against the size
3974 of the destination object if known, or against the maximum size
3975 of an object. */
3976 if (maxread)
3978 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
3979 PAD is nonnull and BNDRNG is valid. */
3980 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
3982 location_t loc = tree_nonartificial_location (exp);
3983 loc = expansion_point_location_if_in_system_header (loc);
3985 tree size = dstsize;
3986 if (pad && pad->mode == access_read_only)
3987 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
3989 if (range[0] && maxread && tree_fits_uhwi_p (size))
3991 if (tree_int_cst_lt (maxobjsize, range[0]))
3993 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
3994 range, size, pad);
3995 return false;
3998 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4000 int opt = (dstwrite || mode != access_read_only
4001 ? OPT_Wstringop_overflow_
4002 : OPT_Wstringop_overread);
4003 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4004 return false;
4008 maybe_warn_nonstring_arg (func, exp);
4011 /* Check for reading past the end of SRC. */
4012 bool overread = (slen
4013 && slen == srcstr
4014 && dstwrite
4015 && range[0]
4016 && TREE_CODE (slen) == INTEGER_CST
4017 && tree_int_cst_lt (slen, range[0]));
4019 if (!overread && pad && pad->src.sizrng[1] >= 0 && pad->src.offrng[0] >= 0)
4021 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4022 PAD is nonnull and BNDRNG is valid. */
4023 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4024 /* Set OVERREAD for reads starting just past the end of an object. */
4025 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4026 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4027 slen = size_zero_node;
4030 if (overread)
4032 if (TREE_NO_WARNING (exp)
4033 || (srcstr && TREE_NO_WARNING (srcstr))
4034 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4035 return false;
4037 location_t loc = tree_nonartificial_location (exp);
4038 loc = expansion_point_location_if_in_system_header (loc);
4040 if (warn_for_access (loc, func, exp, range, slen, mode)
4041 && pad)
4042 inform_access (pad->src, access_read_only);
4044 return false;
4047 return true;
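/* Illustrative sketch (not part of GCC): the classic case rejected by
   check_access is a write overflowing the destination, e.g.

     char d[4];
     memset (d, 0, 8);

   Here DSTWRITE is 8 and DSTSIZE 4, so the function warns ("writing 8
   bytes into a region of size 4 overflows the destination") and returns
   false; callers like expand_builtin_mempcpy use the result to avoid
   expanding the call.  */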
4050 /* A convenience wrapper for check_access above to check access
4051 by a read-only function like puts. */
4053 static bool
4054 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4055 int ost /* = 1 */)
4057 if (!warn_stringop_overread)
4058 return true;
4060 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4061 compute_objsize (src, ost, &data.src);
4062 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4063 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4064 &data);
4067 /* If STMT is a call to an allocation function, returns the constant
4068 size of the object allocated by the call represented as sizetype.
4069 If nonnull, sets RNG1[] to the range of the size. */
4071 tree
4072 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4073 const vr_values *rvals /* = NULL */)
4075 if (!stmt)
4076 return NULL_TREE;
4078 tree allocfntype;
4079 if (tree fndecl = gimple_call_fndecl (stmt))
4080 allocfntype = TREE_TYPE (fndecl);
4081 else
4082 allocfntype = gimple_call_fntype (stmt);
4084 if (!allocfntype)
4085 return NULL_TREE;
4087 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4088 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4089 if (!at)
4091 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4092 return NULL_TREE;
4094 argidx1 = 0;
4097 unsigned nargs = gimple_call_num_args (stmt);
4099 if (argidx1 == UINT_MAX)
4101 tree atval = TREE_VALUE (at);
4102 if (!atval)
4103 return NULL_TREE;
4105 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4106 if (nargs <= argidx1)
4107 return NULL_TREE;
4109 atval = TREE_CHAIN (atval);
4110 if (atval)
4112 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4113 if (nargs <= argidx2)
4114 return NULL_TREE;
4118 tree size = gimple_call_arg (stmt, argidx1);
4120 wide_int rng1_buf[2];
4121 /* If RNG1 is not set, use the buffer. */
4122 if (!rng1)
4123 rng1 = rng1_buf;
4125 if (!get_range (size, rng1, rvals))
4126 return NULL_TREE;
4128 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4129 return fold_convert (sizetype, size);
4131 /* To handle ranges do the math in wide_int and return the product
4132 of the upper bounds as a constant. Ignore anti-ranges. */
4133 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4134 wide_int rng2[2];
4135 if (!get_range (n, rng2, rvals))
4136 return NULL_TREE;
4138 /* Extend to the maximum precision to avoid overflow. */
4139 const int prec = ADDR_MAX_PRECISION;
4140 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
4141 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
4142 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
4143 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
4145 /* Compute products of both bounds for the caller but return the lesser
4146 of SIZE_MAX and the product of the upper bounds as a constant. */
4147 rng1[0] = rng1[0] * rng2[0];
4148 rng1[1] = rng1[1] * rng2[1];
4149 tree size_max = TYPE_MAX_VALUE (sizetype);
4150 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
4152 rng1[1] = wi::to_wide (size_max);
4153 return size_max;
4156 return wide_int_to_tree (sizetype, rng1[1]);
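/* Illustrative sketch (not part of GCC): the attribute lookup above is
   driven by alloc_size, as in the hypothetical declarations

     void *xmalloc (size_t) __attribute__ ((alloc_size (1)));
     void *xcalloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   For a call xcalloc (n, 8) with N in the range [1, 4] the function
   sets RNG1 to the product range [8, 32] and returns the upper bound
   32 as a sizetype constant.  */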
4159 /* Wrapper around the wide_int overload of get_range. Returns the same
4160 result but stores the range in offset_int R[2] instead. */
4162 static bool
4163 get_range (tree x, signop sgn, offset_int r[2],
4164 const vr_values *rvals /* = NULL */)
4166 wide_int wr[2];
4167 if (!get_range (x, wr, rvals))
4168 return false;
4170 r[0] = offset_int::from (wr[0], sgn);
4171 r[1] = offset_int::from (wr[1], sgn);
4172 return true;
4175 /* Helper to compute the size of the object referenced by the PTR
4176 expression which must have pointer type, using Object Size type
4177 OSTYPE (only the least significant 2 bits are used).
4178 On success, sets PREF->REF to the DECL of the referenced object
4179 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4180 offsets into it, and PREF->SIZRNG to the range of sizes of
4181 the object(s).
4182 VISITED is used to avoid visiting the same PHI operand multiple
4183 times, and, when nonnull, RVALS to determine range information.
4184 Returns true on success, false when the size cannot be determined.
4186 The function is intended for diagnostics and should not be used
4187 to influence code generation or optimization. */
4189 static bool
4190 compute_objsize (tree ptr, int ostype, access_ref *pref,
4191 bitmap *visited, const vr_values *rvals /* = NULL */)
4193 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4194 if (addr)
4195 ptr = TREE_OPERAND (ptr, 0);
4197 if (DECL_P (ptr))
4199 /* Bail if the reference is to the pointer itself (as opposed
4200 to what it points to). */
4201 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4202 return false;
4204 tree size = decl_init_size (ptr, false);
4205 if (!size || TREE_CODE (size) != INTEGER_CST)
4206 return false;
4208 pref->ref = ptr;
4209 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4210 return true;
4213 const tree_code code = TREE_CODE (ptr);
4215 if (code == COMPONENT_REF)
4217 tree field = TREE_OPERAND (ptr, 1);
4219 if (ostype == 0)
4221 /* For raw memory functions like memcpy bail if the size
4222 of the enclosing object cannot be determined. */
4223 tree ref = TREE_OPERAND (ptr, 0);
4224 if (!compute_objsize (ref, ostype, pref, visited, rvals)
4225 || !pref->ref)
4226 return false;
4228 /* Otherwise, use the size of the enclosing object and add
4229 the offset of the member to the offset computed so far. */
4230 tree offset = byte_position (field);
4231 if (TREE_CODE (offset) != INTEGER_CST)
4232 return false;
4233 offset_int off = wi::to_offset (offset);
4234 pref->offrng[0] += off;
4235 pref->offrng[1] += off;
4236 return true;
4239 /* Bail if the reference is to the pointer itself (as opposed
4240 to what it points to). */
4241 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4242 return false;
4244 pref->ref = field;
4245 /* Only return constant sizes for now while callers depend
4246 on it. INT0LEN is true for interior zero-length arrays. */
4247 bool int0len = false;
4248 tree size = component_ref_size (ptr, &int0len);
4249 if (int0len)
4251 pref->sizrng[0] = pref->sizrng[1] = 0;
4252 return true;
4255 if (!size || TREE_CODE (size) != INTEGER_CST)
4256 return false;
4258 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4259 return true;
4262 if (code == ARRAY_REF || code == MEM_REF)
4264 tree ref = TREE_OPERAND (ptr, 0);
4265 tree reftype = TREE_TYPE (ref);
4266 if (code == ARRAY_REF
4267 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4268 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4269 of known bound. */
4270 return false;
4272 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4274 /* Give up for MEM_REFs of vector types; those may be synthesized
4275 from multiple assignments to consecutive data members. See PR
4276 93200.
4277 FIXME: Deal with this more generally, e.g., by marking up such
4278 MEM_REFs at the time they're created. */
4279 reftype = TREE_TYPE (reftype);
4280 if (TREE_CODE (reftype) == VECTOR_TYPE)
4281 return false;
4284 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4285 return false;
4287 offset_int orng[2];
4288 tree off = TREE_OPERAND (ptr, 1);
4289 if (!get_range (off, SIGNED, orng, rvals))
4290 /* Fail unless the size of the object is zero. */
4291 return pref->sizrng[0] == 0 && pref->sizrng[0] == pref->sizrng[1];
4293 if (TREE_CODE (ptr) == ARRAY_REF)
4295 /* Convert the array index range determined above to a byte
4296 offset. */
4297 tree lowbnd = array_ref_low_bound (ptr);
4298 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4300 /* Adjust the index by the low bound of the array domain
4301 (normally zero but 1 in Fortran). */
4302 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4303 orng[0] -= lb;
4304 orng[1] -= lb;
4307 tree eltype = TREE_TYPE (ptr);
4308 tree tpsize = TYPE_SIZE_UNIT (eltype);
4309 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4310 return false;
4312 offset_int sz = wi::to_offset (tpsize);
4313 orng[0] *= sz;
4314 orng[1] *= sz;
4316 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4318 /* Except for the permissive raw memory functions which
4319 use the size of the whole object determined above,
4320 use the size of the referenced array. */
4321 pref->sizrng[0] = pref->offrng[0] + orng[0] + sz;
4322 pref->sizrng[1] = pref->offrng[1] + orng[1] + sz;
4326 pref->offrng[0] += orng[0];
4327 pref->offrng[1] += orng[1];
4329 return true;
4332 if (TREE_CODE (ptr) == SSA_NAME)
4334 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4335 if (is_gimple_call (stmt))
4337 /* If STMT is a call to an allocation function get the size
4338 from its argument(s). If successful, also set PREF->REF to
4339 PTR for the caller to include in diagnostics. */
4340 wide_int wr[2];
4341 if (gimple_call_alloc_size (stmt, wr, rvals))
4343 pref->ref = ptr;
4344 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4345 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4346 return true;
4348 return false;
4351 /* TODO: Handle PHI. */
4353 if (!is_gimple_assign (stmt))
4354 return false;
4356 ptr = gimple_assign_rhs1 (stmt);
4358 tree_code code = gimple_assign_rhs_code (stmt);
4359 if (TREE_CODE (TREE_TYPE (ptr)) != POINTER_TYPE)
4360 /* Avoid conversions from non-pointers. */
4361 return false;
4363 if (code == POINTER_PLUS_EXPR)
4365 /* If the offset in the expression can be determined use
4366 it to adjust the overall offset. Otherwise, set the overall
4367 offset to the maximum. */
4368 offset_int orng[2];
4369 tree off = gimple_assign_rhs2 (stmt);
4370 if (!get_range (off, SIGNED, orng, rvals))
4372 orng[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node));
4373 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4376 pref->offrng[0] += orng[0];
4377 pref->offrng[1] += orng[1];
4379 else if (code != ADDR_EXPR)
4380 return false;
4382 return compute_objsize (ptr, ostype, pref, visited, rvals);
4385 tree type = TREE_TYPE (ptr);
4386 type = TYPE_MAIN_VARIANT (type);
4387 if (TREE_CODE (ptr) == ADDR_EXPR)
4388 ptr = TREE_OPERAND (ptr, 0);
4390 if (TREE_CODE (type) == ARRAY_TYPE
4391 && !array_at_struct_end_p (ptr))
4393 if (tree size = TYPE_SIZE_UNIT (type))
4394 return get_range (size, UNSIGNED, pref->sizrng, rvals);
4397 return false;
4400 /* A "public" wrapper around the above. Clients should use this overload
4401 instead. */
4403 static tree
4404 compute_objsize (tree ptr, int ostype, access_ref *pref,
4405 const vr_values *rvals /* = NULL */)
4407 bitmap visited = NULL;
4409 bool success
4410 = compute_objsize (ptr, ostype, pref, &visited, rvals);
4412 if (visited)
4413 BITMAP_FREE (visited);
4415 if (!success)
4416 return NULL_TREE;
4418 if (pref->offrng[1] < pref->offrng[0])
4420 if (pref->offrng[1] < 0
4421 && pref->sizrng[1] <= pref->offrng[0])
4422 return size_zero_node;
4424 return wide_int_to_tree (sizetype, pref->sizrng[1]);
4427 if (pref->offrng[0] < 0)
4429 if (pref->offrng[1] < 0)
4430 return size_zero_node;
4432 pref->offrng[0] = 0;
4435 if (pref->sizrng[1] <= pref->offrng[0])
4436 return size_zero_node;
4438 return wide_int_to_tree (sizetype, pref->sizrng[1] - pref->offrng[0]);
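/* Illustrative sketch (not part of GCC): for a pointer at a constant
   offset into a declared object the wrapper above returns the space
   remaining past the offset, e.g.

     char a[8];
     ... compute_objsize (&a[2], 1, &ref) ...   /* yields 6 */

   with REF.SIZRNG [8, 8] and REF.OFFRNG [2, 2]; once the offset reaches
   the size the result collapses to size_zero_node.  */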
4441 /* Transitional wrapper around the above. The function should be removed
4442 once callers transition to one of the two above. */
4444 tree
4445 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
4446 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
4448 /* Set the initial offsets to zero and size to negative to indicate
4449 none has been computed yet. */
4450 access_ref ref;
4451 tree size = compute_objsize (ptr, ostype, &ref, rvals);
4452 if (!size)
4453 return NULL_TREE;
4455 if (pdecl)
4456 *pdecl = ref.ref;
4458 if (poff)
4459 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
4461 return size;
4464 /* Helper to determine and check the sizes of the source and the destination
4465 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
4466 call expression, DEST is the destination argument, SRC is the source
4467 argument or null, and SIZE is the number of bytes. Use Object Size type-0
4468 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4469 (no overflow or invalid sizes), false otherwise. */
4471 static bool
4472 check_memop_access (tree exp, tree dest, tree src, tree size)
4474 /* For functions like memset and memcpy that operate on raw memory
4475 try to determine the size of the largest source and destination
4476 object using type-0 Object Size regardless of the object size
4477 type specified by the option. */
4478 access_data data (exp, access_read_write);
4479 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
4480 tree dstsize = compute_objsize (dest, 0, &data.dst);
4482 return check_access (exp, size, /*maxread=*/NULL_TREE,
4483 srcsize, dstsize, data.mode, &data);
4486 /* Validate memchr arguments without performing any expansion.
4487 Return NULL_RTX. */
4489 static rtx
4490 expand_builtin_memchr (tree exp, rtx)
4492 if (!validate_arglist (exp,
4493 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4494 return NULL_RTX;
4496 tree arg1 = CALL_EXPR_ARG (exp, 0);
4497 tree len = CALL_EXPR_ARG (exp, 2);
4499 check_read_access (exp, arg1, len, 0);
4501 return NULL_RTX;
4504 /* Expand a call EXP to the memcpy builtin.
4505 Return NULL_RTX if we failed; the caller should emit a normal call.
4506 Otherwise try to get the result in TARGET, if convenient (and in
4507 mode MODE if that's convenient). */
4509 static rtx
4510 expand_builtin_memcpy (tree exp, rtx target)
4512 if (!validate_arglist (exp,
4513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4516 tree dest = CALL_EXPR_ARG (exp, 0);
4517 tree src = CALL_EXPR_ARG (exp, 1);
4518 tree len = CALL_EXPR_ARG (exp, 2);
4520 check_memop_access (exp, dest, src, len);
4522 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4523 /*retmode=*/ RETURN_BEGIN, false);
4526 /* Check a call EXP to the memmove built-in for validity.
4527 Return NULL_RTX on both success and failure. */
4529 static rtx
4530 expand_builtin_memmove (tree exp, rtx target)
4532 if (!validate_arglist (exp,
4533 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4534 return NULL_RTX;
4536 tree dest = CALL_EXPR_ARG (exp, 0);
4537 tree src = CALL_EXPR_ARG (exp, 1);
4538 tree len = CALL_EXPR_ARG (exp, 2);
4540 check_memop_access (exp, dest, src, len);
4542 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4543 /*retmode=*/ RETURN_BEGIN, true);
4546 /* Expand a call EXP to the mempcpy builtin.
4547 Return NULL_RTX if we failed; the caller should emit a normal call,
4548 otherwise try to get the result in TARGET, if convenient (and in
4549 mode MODE if that's convenient). */
4551 static rtx
4552 expand_builtin_mempcpy (tree exp, rtx target)
4554 if (!validate_arglist (exp,
4555 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4556 return NULL_RTX;
4558 tree dest = CALL_EXPR_ARG (exp, 0);
4559 tree src = CALL_EXPR_ARG (exp, 1);
4560 tree len = CALL_EXPR_ARG (exp, 2);
4562 /* Policy does not generally allow using compute_objsize (which
4563 is used internally by check_memop_access) to change code generation
4564 or drive optimization decisions.
4566 In this instance it is safe because the code we generate has
4567 the same semantics regardless of the return value of
4568 check_memop_access. Exactly the same amount of data is copied
4569 and the return value is exactly the same in both cases.
4571 Furthermore, check_memop_access always uses mode 0 for the call to
4572 compute_objsize, so the imprecise nature of compute_objsize is
4573 avoided. */
4575 /* Avoid expanding mempcpy into memcpy when the call is determined
4576 to overflow the buffer. This also prevents the same overflow
4577 from being diagnosed again when expanding memcpy. */
4578 if (!check_memop_access (exp, dest, src, len))
4579 return NULL_RTX;
4581 return expand_builtin_mempcpy_args (dest, src, len,
4582 target, exp, /*retmode=*/ RETURN_END);
4585 /* Helper function to do the actual work for expanding the memory copy
4586 family of functions (memcpy, mempcpy, stpcpy). The expansion should
4587 assign LEN bytes of memory from SRC to DEST and assign to TARGET if
4588 convenient. The return value is based on the RETMODE argument. */
4590 static rtx
4591 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4592 rtx target, tree exp, memop_ret retmode,
4593 bool might_overlap)
4595 unsigned int src_align = get_pointer_alignment (src);
4596 unsigned int dest_align = get_pointer_alignment (dest);
4597 rtx dest_mem, src_mem, dest_addr, len_rtx;
4598 HOST_WIDE_INT expected_size = -1;
4599 unsigned int expected_align = 0;
4600 unsigned HOST_WIDE_INT min_size;
4601 unsigned HOST_WIDE_INT max_size;
4602 unsigned HOST_WIDE_INT probable_max_size;
4604 bool is_move_done;
4606 /* If DEST is not a pointer type, call the normal function. */
4607 if (dest_align == 0)
4608 return NULL_RTX;
4610 /* Likewise, if SRC is not a pointer type, don't do this
4611 operation in-line. */
4612 if (src_align == 0)
4613 return NULL_RTX;
4615 if (currently_expanding_gimple_stmt)
4616 stringop_block_profile (currently_expanding_gimple_stmt,
4617 &expected_align, &expected_size);
4619 if (expected_align < dest_align)
4620 expected_align = dest_align;
4621 dest_mem = get_memory_rtx (dest, len);
4622 set_mem_align (dest_mem, dest_align);
4623 len_rtx = expand_normal (len);
4624 determine_block_size (len, len_rtx, &min_size, &max_size,
4625 &probable_max_size);
4627 /* Try to get the byte representation of the constant SRC points to,
4628 with its byte size in NBYTES. */
4629 unsigned HOST_WIDE_INT nbytes;
4630 const char *rep = getbyterep (src, &nbytes);
4632 /* If the function's constant bound LEN_RTX is less than or equal
4633 to the byte size of the representation of the constant argument,
4634 and if block move would be done by pieces, we can avoid loading
4635 the bytes from memory and only store the computed constant.
4636 This works in the overlap (memmove) case as well because
4637 store_by_pieces just generates a series of stores of constants
4638 from the representation returned by getbyterep(). */
4639 if (rep
4640 && CONST_INT_P (len_rtx)
4641 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
4642 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4643 CONST_CAST (char *, rep),
4644 dest_align, false))
4646 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4647 builtin_memcpy_read_str,
4648 CONST_CAST (char *, rep),
4649 dest_align, false, retmode);
4650 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4651 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4652 return dest_mem;
4655 src_mem = get_memory_rtx (src, len);
4656 set_mem_align (src_mem, src_align);
4658 /* Copy word part most expediently. */
4659 enum block_op_methods method = BLOCK_OP_NORMAL;
4660 if (CALL_EXPR_TAILCALL (exp)
4661 && (retmode == RETURN_BEGIN || target == const0_rtx))
4662 method = BLOCK_OP_TAILCALL;
4663 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4664 && retmode == RETURN_END
4665 && !might_overlap
4666 && target != const0_rtx);
4667 if (use_mempcpy_call)
4668 method = BLOCK_OP_NO_LIBCALL_RET;
4669 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4670 expected_align, expected_size,
4671 min_size, max_size, probable_max_size,
4672 use_mempcpy_call, &is_move_done,
4673 might_overlap);
4675 /* Bail out when a mempcpy call would be expanded as libcall and when
4676 we have a target that provides a fast implementation
4677 of the mempcpy routine. */
4678 if (!is_move_done)
4679 return NULL_RTX;
4681 if (dest_addr == pc_rtx)
4682 return NULL_RTX;
4684 if (dest_addr == 0)
4686 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4687 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4690 if (retmode != RETURN_BEGIN && target != const0_rtx)
4692 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4693 /* stpcpy returns a pointer to the last byte written. */
4694 if (retmode == RETURN_END_MINUS_ONE)
4695 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4698 return dest_addr;
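/* Illustrative sketch (not part of GCC): the store_by_pieces shortcut
   above lets a copy from a constant skip the load entirely, e.g.

     char buf[8];
     memcpy (buf, "abcdefg", 8);

   can be expanded as constant stores of the string's bytes instead of
   a load/store loop, assuming can_store_by_pieces accepts the size and
   the destination's alignment on the target.  */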
4701 static rtx
4702 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4703 rtx target, tree orig_exp, memop_ret retmode)
4705 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4706 retmode, false);
4709 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
4710 if we failed; the caller should then emit a normal call. Otherwise try
4711 to get the result in TARGET, if convenient.
4712 The return value is based on the RETMODE argument. */
4714 static rtx
4715 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4717 class expand_operand ops[3];
4718 rtx dest_mem;
4719 rtx src_mem;
4721 if (!targetm.have_movstr ())
4722 return NULL_RTX;
4724 dest_mem = get_memory_rtx (dest, NULL);
4725 src_mem = get_memory_rtx (src, NULL);
4726 if (retmode == RETURN_BEGIN)
4728 target = force_reg (Pmode, XEXP (dest_mem, 0));
4729 dest_mem = replace_equiv_address (dest_mem, target);
4732 create_output_operand (&ops[0],
4733 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4734 create_fixed_operand (&ops[1], dest_mem);
4735 create_fixed_operand (&ops[2], src_mem);
4736 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4737 return NULL_RTX;
4739 if (retmode != RETURN_BEGIN && target != const0_rtx)
4741 target = ops[0].value;
4742 /* movstr is supposed to set its output operand to the address of the NUL
4743 terminator. If the caller requested a mempcpy-like return value,
4744 adjust it. */
4745 if (retmode == RETURN_END)
4747 rtx tem = plus_constant (GET_MODE (target),
4748 gen_lowpart (GET_MODE (target), target), 1);
4749 emit_move_insn (target, force_operand (tem, NULL_RTX));
4752 return target;
4755 /* Do some very basic size validation of a call to the strcat builtin
4756 given by EXP. Return NULL_RTX to have the built-in expand to a call
4757 to the library function. */
4759 static rtx
4760 expand_builtin_strcat (tree exp)
4762 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4763 || !warn_stringop_overflow)
4764 return NULL_RTX;
4766 tree dest = CALL_EXPR_ARG (exp, 0);
4767 tree src = CALL_EXPR_ARG (exp, 1);
4769 /* There is no way here to determine the length of the string in
4770 the destination to which the SRC string is being appended so
4771 just diagnose cases when the source string is longer than
4772 the destination object. */
4773 access_data data (exp, access_read_write, NULL_TREE, true,
4774 NULL_TREE, true);
4775 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
4776 compute_objsize (src, ost, &data.src);
4777 tree destsize = compute_objsize (dest, ost, &data.dst);
4779 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
4780 src, destsize, data.mode, &data);
4782 return NULL_RTX;
4785 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4786 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4787 try to get the result in TARGET, if convenient (and in mode MODE if that's
4788 convenient). */
4790 static rtx
4791 expand_builtin_strcpy (tree exp, rtx target)
4793 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4794 return NULL_RTX;
4796 tree dest = CALL_EXPR_ARG (exp, 0);
4797 tree src = CALL_EXPR_ARG (exp, 1);
4799 if (warn_stringop_overflow)
4801 access_data data (exp, access_read_write, NULL_TREE, true,
4802 NULL_TREE, true);
4803 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
4804 compute_objsize (src, ost, &data.src);
4805 tree dstsize = compute_objsize (dest, ost, &data.dst);
4806 check_access (exp, /*dstwrite=*/ NULL_TREE,
4807 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
4808 dstsize, data.mode, &data);
4811 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4813 /* Check to see if the argument was declared attribute nonstring
4814 and if so, issue a warning since at this point it's not known
4815 to be nul-terminated. */
4816 tree fndecl = get_callee_fndecl (exp);
4817 maybe_warn_nonstring_arg (fndecl, exp);
4818 return ret;
4821 return NULL_RTX;
4824 /* Helper function to do the actual work for expand_builtin_strcpy. The
4825 arguments to the builtin_strcpy call DEST and SRC are broken out
4826 so that this can also be called without constructing an actual CALL_EXPR.
4827 The other arguments and return value are the same as for
4828 expand_builtin_strcpy. */
4830 static rtx
4831 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4833 /* Detect strcpy calls with unterminated arrays. */
4834 tree size;
4835 bool exact;
4836 if (tree nonstr = unterminated_array (src, &size, &exact))
4838 /* NONSTR refers to the non-nul terminated constant array. */
4839 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
4840 size, exact);
4841 return NULL_RTX;
4844 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4847 /* Expand a call EXP to the stpcpy builtin.
4848 Return NULL_RTX if we failed; the caller should emit a normal call.
4849 Otherwise try to get the result in TARGET, if convenient (and in
4850 mode MODE if that's convenient). */
4852 static rtx
4853 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4855 tree dst, src;
4856 location_t loc = EXPR_LOCATION (exp);
4858 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4859 return NULL_RTX;
4861 dst = CALL_EXPR_ARG (exp, 0);
4862 src = CALL_EXPR_ARG (exp, 1);
4864 if (warn_stringop_overflow)
4866 access_data data (exp, access_read_write);
4867 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
4868 &data.dst);
4869 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
4870 src, destsize, data.mode, &data);
4873 /* If return value is ignored, transform stpcpy into strcpy. */
4874 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4876 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4877 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4878 return expand_expr (result, target, mode, EXPAND_NORMAL);
4880 else
4882 tree len, lenp1;
4883 rtx ret;
4885 /* Ensure we get an actual string whose length can be evaluated at
4886 compile-time, not an expression containing a string. This is
4887 because the latter will potentially produce pessimized code
4888 when used to produce the return value. */
4889 c_strlen_data lendata = { };
4890 if (!c_getstr (src)
4891 || !(len = c_strlen (src, 0, &lendata, 1)))
4892 return expand_movstr (dst, src, target,
4893 /*retmode=*/ RETURN_END_MINUS_ONE);
4895 if (lendata.decl)
4896 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
4898 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4899 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4900 target, exp,
4901 /*retmode=*/ RETURN_END_MINUS_ONE);
4903 if (ret)
4904 return ret;
4906 if (TREE_CODE (len) == INTEGER_CST)
4908 rtx len_rtx = expand_normal (len);
4910 if (CONST_INT_P (len_rtx))
4912 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4914 if (ret)
4916 if (! target)
4918 if (mode != VOIDmode)
4919 target = gen_reg_rtx (mode);
4920 else
4921 target = gen_reg_rtx (GET_MODE (ret));
4923 if (GET_MODE (target) != GET_MODE (ret))
4924 ret = gen_lowpart (GET_MODE (target), ret);
4926 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4927 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4928 gcc_assert (ret);
4930 return target;
4935 return expand_movstr (dst, src, target,
4936 /*retmode=*/ RETURN_END_MINUS_ONE);
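/* Illustrative sketch (not part of GCC): for a constant source the code
   above reuses the mempcpy expansion, e.g.

     stpcpy (d, "abc");

   copies LEN + 1 == 4 bytes and, with RETURN_END_MINUS_ONE, returns
   D + 3, a pointer to the terminating nul as stpcpy requires.  */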
4940 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4941 arguments while being careful to avoid duplicate warnings (which could
4942 be issued if the expander were to expand the call, resulting in it
4943 being emitted in expand_call ()). */
4945 static rtx
4946 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4948 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4950 /* The call has been successfully expanded. Check for nonstring
4951 arguments and issue warnings as appropriate. */
4952 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4953 return ret;
4956 return NULL_RTX;
4959 /* Check a call EXP to the stpncpy built-in for validity.
4960 Return NULL_RTX on both success and failure. */
4962 static rtx
4963 expand_builtin_stpncpy (tree exp, rtx)
4965 if (!validate_arglist (exp,
4966 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4967 || !warn_stringop_overflow)
4968 return NULL_RTX;
4970 /* The source and destination of the call. */
4971 tree dest = CALL_EXPR_ARG (exp, 0);
4972 tree src = CALL_EXPR_ARG (exp, 1);
4974 /* The exact number of bytes to write (not the maximum). */
4975 tree len = CALL_EXPR_ARG (exp, 2);
4976 access_data data (exp, access_read_write);
4977 /* The size of the destination object. */
4978 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4979 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
4981 return NULL_RTX;
4984 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4985 bytes from constant string DATA + OFFSET and return it as target
4986 constant. */
4988 static rtx
4989 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4990 scalar_int_mode mode)
4992 const char *str = (const char *) data;
4994 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4995 return const0_rtx;
4997 return c_readstr (str + offset, mode);
5000 /* Helper to check the sizes of sequences and the destination of calls
5001 to __builtin_strncat and __builtin___strncat_chk. Returns true on
5002 success (no overflow or invalid sizes), false otherwise. */
5004 static bool
5005 check_strncat_sizes (tree exp, tree objsize)
5007 tree dest = CALL_EXPR_ARG (exp, 0);
5008 tree src = CALL_EXPR_ARG (exp, 1);
5009 tree maxread = CALL_EXPR_ARG (exp, 2);
5011 /* Try to determine the range of lengths that the source expression
5012 refers to. */
5013 c_strlen_data lendata = { };
5014 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5016 /* Try to verify that the destination is big enough for the shortest
5017 string. */
5019 access_data data (exp, access_read_write, maxread, true);
5020 if (!objsize && warn_stringop_overflow)
5022 /* If it hasn't been provided by __strncat_chk, try to determine
5023 the size of the destination object into which the source is
5024 being copied. */
5025 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5028 /* Add one for the terminating nul. */
5029 tree srclen = (lendata.minlen
5030 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
5031 size_one_node)
5032 : NULL_TREE);
5034 /* The strncat function copies at most MAXREAD bytes and always appends
5035 the terminating nul so the specified upper bound should never be equal
5036 to (or greater than) the size of the destination. */
5037 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
5038 && tree_int_cst_equal (objsize, maxread))
5040 location_t loc = tree_nonartificial_location (exp);
5041 loc = expansion_point_location_if_in_system_header (loc);
5043 warning_at (loc, OPT_Wstringop_overflow_,
5044 "%K%qD specified bound %E equals destination size",
5045 exp, get_callee_fndecl (exp), maxread);
5047 return false;
5050 if (!srclen
5051 || (maxread && tree_fits_uhwi_p (maxread)
5052 && tree_fits_uhwi_p (srclen)
5053 && tree_int_cst_lt (maxread, srclen)))
5054 srclen = maxread;
5056 /* The number of bytes to write is LEN but check_access will also
5057 check SRCLEN if LEN's value isn't known. */
5058 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5059 objsize, data.mode, &data);
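/* Illustrative sketch (not part of GCC): the equality check above
   catches the common anti-pattern of passing the destination size as
   the strncat bound:

     char d[8];
     strncat (d, s, sizeof d);   /* bound 8 equals destination size */

   which leaves no room for the terminating nul that strncat always
   appends.  */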
5062 /* Similar to expand_builtin_strcat, do some very basic size validation
5063 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
5064 the built-in expand to a call to the library function. */
5066 static rtx
5067 expand_builtin_strncat (tree exp, rtx)
5069 if (!validate_arglist (exp,
5070 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5071 || !warn_stringop_overflow)
5072 return NULL_RTX;
5074 tree dest = CALL_EXPR_ARG (exp, 0);
5075 tree src = CALL_EXPR_ARG (exp, 1);
5076 /* The upper bound on the number of bytes to write. */
5077 tree maxread = CALL_EXPR_ARG (exp, 2);
5079 /* Detect unterminated source (only). */
5080 if (!check_nul_terminated_array (exp, src, maxread))
5081 return NULL_RTX;
5083 /* The length of the source sequence. */
5084 tree slen = c_strlen (src, 1);
5086 /* Try to determine the range of lengths that the source expression
5087 refers to. Since the lengths are only used for warnings and not
5088 for code generation, disable strict mode below. */
5089 tree maxlen = slen;
5090 if (!maxlen)
5092 c_strlen_data lendata = { };
5093 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5094 maxlen = lendata.maxbound;
5097 access_data data (exp, access_read_write);
5098 /* Try to verify that the destination is big enough for the shortest
5099 string. First try to determine the size of the destination object
5100 into which the source is being copied. */
5101 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5103 /* Add one for the terminating nul. */
5104 tree srclen = (maxlen
5105 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
5106 size_one_node)
5107 : NULL_TREE);
5109 /* The strncat function copies at most MAXREAD bytes and always appends
5110 the terminating nul so the specified upper bound should never be equal
5111 to (or greater than) the size of the destination. */
5112 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
5113 && tree_int_cst_equal (destsize, maxread))
5115 location_t loc = tree_nonartificial_location (exp);
5116 loc = expansion_point_location_if_in_system_header (loc);
5118 warning_at (loc, OPT_Wstringop_overflow_,
5119 "%K%qD specified bound %E equals destination size",
5120 exp, get_callee_fndecl (exp), maxread);
5122 return NULL_RTX;
5125 if (!srclen
5126 || (maxread && tree_fits_uhwi_p (maxread)
5127 && tree_fits_uhwi_p (srclen)
5128 && tree_int_cst_lt (maxread, srclen)))
5129 srclen = maxread;
5131 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5132 destsize, data.mode, &data);
5134 return NULL_RTX;
5137 /* Expand expression EXP, which is a call to the strncpy builtin. Return
5138 NULL_RTX if we failed; the caller should emit a normal call. */
5140 static rtx
5141 expand_builtin_strncpy (tree exp, rtx target)
5143 location_t loc = EXPR_LOCATION (exp);
5145 if (!validate_arglist (exp,
5146 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5147 return NULL_RTX;
5148 tree dest = CALL_EXPR_ARG (exp, 0);
5149 tree src = CALL_EXPR_ARG (exp, 1);
5150 /* The number of bytes to write (not the maximum). */
5151 tree len = CALL_EXPR_ARG (exp, 2);
5153 /* The length of the source sequence. */
5154 tree slen = c_strlen (src, 1);
5156 if (warn_stringop_overflow)
5158 access_data data (exp, access_read_write, len, true, len, true);
5159 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5160 compute_objsize (src, ost, &data.src);
5161 tree dstsize = compute_objsize (dest, ost, &data.dst);
5162 /* The number of bytes to write is LEN but check_access will also
5163 check SLEN if LEN's value isn't known. */
5164 check_access (exp, /*dstwrite=*/len,
5165 /*maxread=*/len, src, dstsize, data.mode, &data);
5168 /* We must be passed constant LEN and SRC parameters. */
5169 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5170 return NULL_RTX;
5172 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
5174 /* We're required to pad with trailing zeros if the requested
5175 len is greater than strlen(s2)+1. In that case try to
5176 use store_by_pieces; if it fails, punt. */
5177 if (tree_int_cst_lt (slen, len))
5179 unsigned int dest_align = get_pointer_alignment (dest);
5180 const char *p = c_getstr (src);
5181 rtx dest_mem;
5183 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5184 || !can_store_by_pieces (tree_to_uhwi (len),
5185 builtin_strncpy_read_str,
5186 CONST_CAST (char *, p),
5187 dest_align, false))
5188 return NULL_RTX;
5190 dest_mem = get_memory_rtx (dest, len);
5191 store_by_pieces (dest_mem, tree_to_uhwi (len),
5192 builtin_strncpy_read_str,
5193 CONST_CAST (char *, p), dest_align, false,
5194 RETURN_BEGIN);
5195 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5196 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5197 return dest_mem;
5200 return NULL_RTX;
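/* Illustrative sketch (not part of GCC): the padding path above handles
   constant cases such as

     char d[8];
     strncpy (d, "ab", 8);

   where the bound 8 exceeds strlen ("ab") + 1 == 3, so the expansion
   stores the two characters and six trailing nuls by pieces.  */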
5203 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5204 bytes from constant string DATA + OFFSET and return it as target
5205 constant. */
5207 static rtx
5208 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5209 scalar_int_mode mode)
5211 const char *c = (const char *) data;
5212 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5214 memset (p, *c, GET_MODE_SIZE (mode));
5216 return c_readstr (p, mode);
5219 /* Callback routine for store_by_pieces. Return the RTL of a register
5220 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5221 char value given in the RTL register data. For example, if mode is
5222 4 bytes wide, return the RTL for 0x01010101*data. */
5224 static rtx
5225 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5226 scalar_int_mode mode)
5228 rtx target, coeff;
5229 size_t size;
5230 char *p;
5232 size = GET_MODE_SIZE (mode);
5233 if (size == 1)
5234 return (rtx) data;
5236 p = XALLOCAVEC (char, size);
5237 memset (p, 1, size);
5238 coeff = c_readstr (p, mode);
5240 target = convert_to_mode (mode, (rtx) data, 1);
5241 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5242 return force_reg (mode, target);
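/* Illustrative sketch (not part of GCC): the multiplication above is the
   standard byte-splat trick.  For a 4-byte mode and a runtime byte C,

     word = (unsigned) c * 0x01010101u;   /* e.g. 0xab -> 0xabababab */

   replicates the low byte across the word, mirroring what
   builtin_memset_read_str computes for a constant fill value.  */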
5245 /* Expand expression EXP, which is a call to the memset builtin. Return
5246 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
5247 try to get the result in TARGET, if convenient (and in mode MODE if that's
5248 convenient). */
5250 static rtx
5251 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5253 if (!validate_arglist (exp,
5254 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5255 return NULL_RTX;
5257 tree dest = CALL_EXPR_ARG (exp, 0);
5258 tree val = CALL_EXPR_ARG (exp, 1);
5259 tree len = CALL_EXPR_ARG (exp, 2);
5261 check_memop_access (exp, dest, NULL_TREE, len);
5263 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5266 /* Helper function to do the actual work for expand_builtin_memset. The
5267 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5268 so that this can also be called without constructing an actual CALL_EXPR.
5269 The other arguments and return value are the same as for
5270 expand_builtin_memset. */
5272 static rtx
5273 expand_builtin_memset_args (tree dest, tree val, tree len,
5274 rtx target, machine_mode mode, tree orig_exp)
5276 tree fndecl, fn;
5277 enum built_in_function fcode;
5278 machine_mode val_mode;
5279 char c;
5280 unsigned int dest_align;
5281 rtx dest_mem, dest_addr, len_rtx;
5282 HOST_WIDE_INT expected_size = -1;
5283 unsigned int expected_align = 0;
5284 unsigned HOST_WIDE_INT min_size;
5285 unsigned HOST_WIDE_INT max_size;
5286 unsigned HOST_WIDE_INT probable_max_size;
5288 dest_align = get_pointer_alignment (dest);
5290 /* If DEST is not a pointer type, don't do this operation in-line. */
5291 if (dest_align == 0)
5292 return NULL_RTX;
5294 if (currently_expanding_gimple_stmt)
5295 stringop_block_profile (currently_expanding_gimple_stmt,
5296 &expected_align, &expected_size);
5298 if (expected_align < dest_align)
5299 expected_align = dest_align;
5301 /* If the LEN parameter is zero, return DEST. */
5302 if (integer_zerop (len))
5304 /* Evaluate and ignore VAL in case it has side-effects. */
5305 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5306 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5309 /* Stabilize the arguments in case we fail. */
5310 dest = builtin_save_expr (dest);
5311 val = builtin_save_expr (val);
5312 len = builtin_save_expr (len);
5314 len_rtx = expand_normal (len);
5315 determine_block_size (len, len_rtx, &min_size, &max_size,
5316 &probable_max_size);
5317 dest_mem = get_memory_rtx (dest, len);
5318 val_mode = TYPE_MODE (unsigned_char_type_node);
5320 if (TREE_CODE (val) != INTEGER_CST)
5322 rtx val_rtx;
5324 val_rtx = expand_normal (val);
5325 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5327 /* Assume that we can memset by pieces if we can store
5328 the coefficients by pieces (in the required modes).
5329 We can't pass builtin_memset_gen_str as that emits RTL. */
5330 c = 1;
5331 if (tree_fits_uhwi_p (len)
5332 && can_store_by_pieces (tree_to_uhwi (len),
5333 builtin_memset_read_str, &c, dest_align,
5334 true))
5336 val_rtx = force_reg (val_mode, val_rtx);
5337 store_by_pieces (dest_mem, tree_to_uhwi (len),
5338 builtin_memset_gen_str, val_rtx, dest_align,
5339 true, RETURN_BEGIN);
5341 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5342 dest_align, expected_align,
5343 expected_size, min_size, max_size,
5344 probable_max_size))
5345 goto do_libcall;
5347 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5348 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5349 return dest_mem;
5352 if (target_char_cast (val, &c))
5353 goto do_libcall;
5355 if (c)
5357 if (tree_fits_uhwi_p (len)
5358 && can_store_by_pieces (tree_to_uhwi (len),
5359 builtin_memset_read_str, &c, dest_align,
5360 true))
5361 store_by_pieces (dest_mem, tree_to_uhwi (len),
5362 builtin_memset_read_str, &c, dest_align, true,
5363 RETURN_BEGIN);
5364 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5365 gen_int_mode (c, val_mode),
5366 dest_align, expected_align,
5367 expected_size, min_size, max_size,
5368 probable_max_size))
5369 goto do_libcall;
5371 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5372 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5373 return dest_mem;
5376 set_mem_align (dest_mem, dest_align);
5377 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5378 CALL_EXPR_TAILCALL (orig_exp)
5379 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5380 expected_align, expected_size,
5381 min_size, max_size,
5382 probable_max_size);
5384 if (dest_addr == 0)
5386 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5387 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5390 return dest_addr;
5392 do_libcall:
5393 fndecl = get_callee_fndecl (orig_exp);
5394 fcode = DECL_FUNCTION_CODE (fndecl);
5395 if (fcode == BUILT_IN_MEMSET)
5396 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5397 dest, val, len);
5398 else if (fcode == BUILT_IN_BZERO)
5399 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5400 dest, len);
5401 else
5402 gcc_unreachable ();
5403 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5404 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5405 return expand_call (fn, target, target == const0_rtx);
5408 /* Expand expression EXP, which is a call to the bzero builtin. Return
5409 NULL_RTX if we failed; the caller should emit a normal call. */
5411 static rtx
5412 expand_builtin_bzero (tree exp)
5414 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5415 return NULL_RTX;
5417 tree dest = CALL_EXPR_ARG (exp, 0);
5418 tree size = CALL_EXPR_ARG (exp, 1);
5420 check_memop_access (exp, dest, NULL_TREE, size);
5422 /* New argument list transforming bzero(ptr x, int y) to
5423 memset(ptr x, int 0, size_t y). This is done this way
5424 so that if it isn't expanded inline, we fall back to
5425 calling bzero instead of memset. */
5427 location_t loc = EXPR_LOCATION (exp);
5429 return expand_builtin_memset_args (dest, integer_zero_node,
5430 fold_convert_loc (loc,
5431 size_type_node, size),
5432 const0_rtx, VOIDmode, exp);
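
/* Editor's sketch (illustrative only, not part of this file): the
   source-level identity behind the transformation above; bzero is
   assumed to come from <strings.h> as on POSIX systems.  */
#if 0
#include <string.h>
#include <strings.h>

static void
clear_buffer (void *p, size_t n)
{
  bzero (p, n);       /* is expanded exactly like ...  */
  memset (p, 0, n);   /* ... this, except that a failed inline
                         expansion falls back to a bzero libcall.  */
}
#endif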
5435 /* Try to expand cmpstr operation ICODE with the given operands.
5436 Return the result rtx on success, otherwise return null. */
5438 static rtx
5439 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5440 HOST_WIDE_INT align)
5442 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5444 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5445 target = NULL_RTX;
5447 class expand_operand ops[4];
5448 create_output_operand (&ops[0], target, insn_mode);
5449 create_fixed_operand (&ops[1], arg1_rtx);
5450 create_fixed_operand (&ops[2], arg2_rtx);
5451 create_integer_operand (&ops[3], align);
5452 if (maybe_expand_insn (icode, 4, ops))
5453 return ops[0].value;
5454 return NULL_RTX;
5457 /* Expand expression EXP, which is a call to the memcmp built-in function.
5458 Return NULL_RTX if we failed and the caller should emit a normal call,
5459 otherwise try to get the result in TARGET, if convenient.
5460 RESULT_EQ is true if we can relax the returned value to be either zero
5461 or nonzero, without caring about the sign. */
5463 static rtx
5464 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5466 if (!validate_arglist (exp,
5467 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5468 return NULL_RTX;
5470 tree arg1 = CALL_EXPR_ARG (exp, 0);
5471 tree arg2 = CALL_EXPR_ARG (exp, 1);
5472 tree len = CALL_EXPR_ARG (exp, 2);
5474 /* Diagnose calls where the specified length exceeds the size of either
5475 object. */
5476 if (!check_read_access (exp, arg1, len, 0)
5477 || !check_read_access (exp, arg2, len, 0))
5478 return NULL_RTX;
5480 /* Due to the performance benefit, always inline the calls first
5481 when result_eq is false. */
5482 rtx result = NULL_RTX;
5483 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5484 if (!result_eq && fcode != BUILT_IN_BCMP)
5486 result = inline_expand_builtin_bytecmp (exp, target);
5487 if (result)
5488 return result;
5491 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5492 location_t loc = EXPR_LOCATION (exp);
5494 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5495 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5497 /* If we don't have POINTER_TYPE, call the function. */
5498 if (arg1_align == 0 || arg2_align == 0)
5499 return NULL_RTX;
5501 rtx arg1_rtx = get_memory_rtx (arg1, len);
5502 rtx arg2_rtx = get_memory_rtx (arg2, len);
5503 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5505 /* Set MEM_SIZE as appropriate. */
5506 if (CONST_INT_P (len_rtx))
5508 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5509 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5512 by_pieces_constfn constfn = NULL;
5514 /* Try to get the byte representation of the constant that ARG2 (or,
5515 only when the function's result is used for equality to zero, ARG1)
5516 points to, with its byte size in NBYTES. */
5517 unsigned HOST_WIDE_INT nbytes;
5518 const char *rep = getbyterep (arg2, &nbytes);
5519 if (result_eq && rep == NULL)
5521 /* For equality to zero the arguments are interchangeable. */
5522 rep = getbyterep (arg1, &nbytes);
5523 if (rep != NULL)
5524 std::swap (arg1_rtx, arg2_rtx);
5527 /* If the function's constant bound LEN_RTX is less than or equal
5528 to the byte size of the representation of the constant argument,
5529 and if block move would be done by pieces, we can avoid loading
5530 the bytes from memory and only store the computed constant result. */
5531 if (rep
5532 && CONST_INT_P (len_rtx)
5533 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
5534 constfn = builtin_memcpy_read_str;
5536 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5537 TREE_TYPE (len), target,
5538 result_eq, constfn,
5539 CONST_CAST (char *, rep));
5541 if (result)
5543 /* Return the value in the proper mode for this function. */
5544 if (GET_MODE (result) == mode)
5545 return result;
5547 if (target != 0)
5549 convert_move (target, result, 0);
5550 return target;
5553 return convert_to_mode (mode, result, 0);
5556 return NULL_RTX;
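
/* Editor's sketch (illustrative only, not part of this file): the
   RESULT_EQ distinction above corresponds to these two source-level
   uses of memcmp.  */
#if 0
#include <string.h>

static int
same_block (const void *a, const void *b)
{
  /* Only zero/nonzero is observed, so RESULT_EQ is true and the
     expander may return any nonzero value on mismatch.  */
  return memcmp (a, b, 16) == 0;
}

static int
order_blocks (const void *a, const void *b)
{
  /* The sign of the result is observed, so RESULT_EQ is false.  */
  return memcmp (a, b, 16);
}
#endif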
5559 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5560 if we failed; the caller should emit a normal call. Otherwise try to get
5561 the result in TARGET, if convenient. */
5563 static rtx
5564 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5566 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5567 return NULL_RTX;
5569 tree arg1 = CALL_EXPR_ARG (exp, 0);
5570 tree arg2 = CALL_EXPR_ARG (exp, 1);
5572 if (!check_read_access (exp, arg1)
5573 || !check_read_access (exp, arg2))
5574 return NULL_RTX;
5576 /* Due to the performance benefit, always inline the calls first. */
5577 rtx result = NULL_RTX;
5578 result = inline_expand_builtin_bytecmp (exp, target);
5579 if (result)
5580 return result;
5582 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5583 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5584 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5585 return NULL_RTX;
5587 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5588 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5590 /* If we don't have POINTER_TYPE, call the function. */
5591 if (arg1_align == 0 || arg2_align == 0)
5592 return NULL_RTX;
5594 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5595 arg1 = builtin_save_expr (arg1);
5596 arg2 = builtin_save_expr (arg2);
5598 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5599 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5601 /* Try to call cmpstrsi. */
5602 if (cmpstr_icode != CODE_FOR_nothing)
5603 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5604 MIN (arg1_align, arg2_align));
5606 /* Try to determine at least one length and call cmpstrnsi. */
5607 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5609 tree len;
5610 rtx arg3_rtx;
5612 tree len1 = c_strlen (arg1, 1);
5613 tree len2 = c_strlen (arg2, 1);
5615 if (len1)
5616 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5617 if (len2)
5618 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5620 /* If we don't have a constant length for the first, use the length
5621 of the second, if we know it. We don't require a constant for
5622 this case; some cost analysis could be done if both are available
5623 but neither is constant. For now, assume they're equally cheap,
5624 unless one has side effects. If both strings have constant lengths,
5625 use the smaller. */
5627 if (!len1)
5628 len = len2;
5629 else if (!len2)
5630 len = len1;
5631 else if (TREE_SIDE_EFFECTS (len1))
5632 len = len2;
5633 else if (TREE_SIDE_EFFECTS (len2))
5634 len = len1;
5635 else if (TREE_CODE (len1) != INTEGER_CST)
5636 len = len2;
5637 else if (TREE_CODE (len2) != INTEGER_CST)
5638 len = len1;
5639 else if (tree_int_cst_lt (len1, len2))
5640 len = len1;
5641 else
5642 len = len2;
5644 /* If both arguments have side effects, we cannot optimize. */
5645 if (len && !TREE_SIDE_EFFECTS (len))
5647 arg3_rtx = expand_normal (len);
5648 result = expand_cmpstrn_or_cmpmem
5649 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5650 arg3_rtx, MIN (arg1_align, arg2_align));
5654 tree fndecl = get_callee_fndecl (exp);
5655 if (result)
5657 /* Check to see if the argument was declared attribute nonstring
5658 and if so, issue a warning since at this point it's not known
5659 to be nul-terminated. */
5660 maybe_warn_nonstring_arg (fndecl, exp);
5662 /* Return the value in the proper mode for this function. */
5663 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5664 if (GET_MODE (result) == mode)
5665 return result;
5666 if (target == 0)
5667 return convert_to_mode (mode, result, 0);
5668 convert_move (target, result, 0);
5669 return target;
5672 /* Expand the library call ourselves using a stabilized argument
5673 list to avoid re-evaluating the function's arguments twice. */
5674 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5675 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5676 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5677 return expand_call (fn, target, target == const0_rtx);
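
/* Editor's sketch (illustrative only, not part of this file): when one
   strcmp argument is a string literal, the code above can bound the
   comparison by strlen(literal)+1, making the call behave like the
   strncmp below (the NUL is included in the count).  */
#if 0
#include <string.h>

static int
is_keyword (const char *s)
{
  return strcmp (s, "while") == 0;
  /* Comparable to: strncmp (s, "while", 6) == 0, since len2 = 5+1.  */
}
#endif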
5680 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5681 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
5682 try to get the result in TARGET, if convenient. */
5684 static rtx
5685 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5686 ATTRIBUTE_UNUSED machine_mode mode)
5688 if (!validate_arglist (exp,
5689 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5690 return NULL_RTX;
5692 tree arg1 = CALL_EXPR_ARG (exp, 0);
5693 tree arg2 = CALL_EXPR_ARG (exp, 1);
5694 tree arg3 = CALL_EXPR_ARG (exp, 2);
5696 if (!check_nul_terminated_array (exp, arg1, arg3)
5697 || !check_nul_terminated_array (exp, arg2, arg3))
5698 return NULL_RTX;
5700 location_t loc = tree_nonartificial_location (exp);
5701 loc = expansion_point_location_if_in_system_header (loc);
5703 tree len1 = c_strlen (arg1, 1);
5704 tree len2 = c_strlen (arg2, 1);
5706 if (!len1 || !len2)
5708 /* Check to see if the argument was declared attribute nonstring
5709 and if so, issue a warning since at this point it's not known
5710 to be nul-terminated. */
5711 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
5712 && !len1 && !len2)
5714 /* A strncmp read is constrained not just by the bound but
5715 also by the length of the shorter string. Specifying
5716 a bound that's larger than the size of either array makes
5717 no sense and is likely a bug. When the length of neither
5718 of the two strings is known but the sizes of both of
5719 the arrays they are stored in are, issue a warning if
5720 the bound is larger than the size of the larger
5721 of the two arrays. */
5723 access_ref ref1 (arg3, true);
5724 access_ref ref2 (arg3, true);
5726 tree bndrng[2] = { NULL_TREE, NULL_TREE };
5727 get_size_range (arg3, bndrng, ref1.bndrng);
5729 tree size1 = compute_objsize (arg1, 1, &ref1);
5730 tree size2 = compute_objsize (arg2, 1, &ref2);
5731 tree func = get_callee_fndecl (exp);
5733 if (size1 && size2)
5735 tree maxsize = tree_int_cst_le (size1, size2) ? size2 : size1;
5737 if (tree_int_cst_lt (maxsize, bndrng[0]))
5738 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
5739 bndrng, maxsize);
5741 else if (bndrng[0]
5742 && !integer_zerop (bndrng[0])
5743 && ((size1 && integer_zerop (size1))
5744 || (size2 && integer_zerop (size2))))
5745 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
5746 bndrng, integer_zero_node);
5750 /* Due to the performance benefit, always inline the calls first. */
5751 rtx result = NULL_RTX;
5752 result = inline_expand_builtin_bytecmp (exp, target);
5753 if (result)
5754 return result;
5756 /* If c_strlen can determine an expression for one of the string
5757 lengths, and it doesn't have side effects, then emit cmpstrnsi
5758 using length MIN(strlen(string)+1, arg3). */
5759 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5760 if (cmpstrn_icode == CODE_FOR_nothing)
5761 return NULL_RTX;
5763 tree len;
5765 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5766 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5768 if (len1)
5769 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5770 if (len2)
5771 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5773 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5775 /* If we don't have a constant length for the first, use the length
5776 of the second, if we know it. If neither string is constant length,
5777 use the given length argument. We don't require a constant for
5778 this case; some cost analysis could be done if both are available
5779 but neither is constant. For now, assume they're equally cheap,
5780 unless one has side effects. If both strings have constant lengths,
5781 use the smaller. */
5783 if (!len1 && !len2)
5784 len = len3;
5785 else if (!len1)
5786 len = len2;
5787 else if (!len2)
5788 len = len1;
5789 else if (TREE_SIDE_EFFECTS (len1))
5790 len = len2;
5791 else if (TREE_SIDE_EFFECTS (len2))
5792 len = len1;
5793 else if (TREE_CODE (len1) != INTEGER_CST)
5794 len = len2;
5795 else if (TREE_CODE (len2) != INTEGER_CST)
5796 len = len1;
5797 else if (tree_int_cst_lt (len1, len2))
5798 len = len1;
5799 else
5800 len = len2;
5802 /* If we are not using the given length, we must incorporate it here.
5803 The actual new length parameter will be MIN(len,arg3) in this case. */
5804 if (len != len3)
5806 len = fold_convert_loc (loc, sizetype, len);
5807 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5809 rtx arg1_rtx = get_memory_rtx (arg1, len);
5810 rtx arg2_rtx = get_memory_rtx (arg2, len);
5811 rtx arg3_rtx = expand_normal (len);
5812 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5813 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5814 MIN (arg1_align, arg2_align));
5816 tree fndecl = get_callee_fndecl (exp);
5817 if (result)
5819 /* Return the value in the proper mode for this function. */
5820 mode = TYPE_MODE (TREE_TYPE (exp));
5821 if (GET_MODE (result) == mode)
5822 return result;
5823 if (target == 0)
5824 return convert_to_mode (mode, result, 0);
5825 convert_move (target, result, 0);
5826 return target;
5829 /* Expand the library call ourselves using a stabilized argument
5830 list to avoid re-evaluating the function's arguments twice. */
5831 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5832 if (TREE_NO_WARNING (exp))
5833 TREE_NO_WARNING (call) = true;
5834 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5835 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5836 return expand_call (call, target, target == const0_rtx);
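
/* Editor's sketch (illustrative only, not part of this file): the
   effective length chosen above is MIN(strlen(const_arg)+1, bound).  */
#if 0
#include <string.h>

static int
cmp_with_large_bound (const char *s)
{
  /* len2 = strlen ("abc") + 1 = 4 and arg3 = 100, so the expanded
     comparison reads at most MIN (4, 100) = 4 bytes per argument.  */
  return strncmp (s, "abc", 100);
}
#endif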
5839 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5840 if that's convenient. */
5842 rtx
5843 expand_builtin_saveregs (void)
5845 rtx val;
5846 rtx_insn *seq;
5848 /* Don't do __builtin_saveregs more than once in a function.
5849 Save the result of the first call and reuse it. */
5850 if (saveregs_value != 0)
5851 return saveregs_value;
5853 /* When this function is called, it means that registers must be
5854 saved on entry to this function. So we migrate the call to the
5855 first insn of this function. */
5857 start_sequence ();
5859 /* Do whatever the machine needs done in this case. */
5860 val = targetm.calls.expand_builtin_saveregs ();
5862 seq = get_insns ();
5863 end_sequence ();
5865 saveregs_value = val;
5867 /* Put the insns after the NOTE that starts the function. If this
5868 is inside a start_sequence, make the outer-level insn chain current, so
5869 the code is placed at the start of the function. */
5870 push_topmost_sequence ();
5871 emit_insn_after (seq, entry_of_function ());
5872 pop_topmost_sequence ();
5874 return val;
5877 /* Expand a call to __builtin_next_arg. */
5879 static rtx
5880 expand_builtin_next_arg (void)
5882 /* Checking arguments is already done in fold_builtin_next_arg,
5883 which must be called before this function. */
5884 return expand_binop (ptr_mode, add_optab,
5885 crtl->args.internal_arg_pointer,
5886 crtl->args.arg_offset_rtx,
5887 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5890 /* Make it easier for the backends by protecting the valist argument
5891 from multiple evaluations. */
5893 static tree
5894 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5896 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5898 /* The current way of determining the type of valist is completely
5899 bogus. We should have the information on the va builtin instead. */
5900 if (!vatype)
5901 vatype = targetm.fn_abi_va_list (cfun->decl);
5903 if (TREE_CODE (vatype) == ARRAY_TYPE)
5905 if (TREE_SIDE_EFFECTS (valist))
5906 valist = save_expr (valist);
5908 /* For this case, the backends will be expecting a pointer to
5909 vatype, but it's possible we've actually been given an array
5910 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5911 So fix it. */
5912 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5914 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5915 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5918 else
5920 tree pt = build_pointer_type (vatype);
5922 if (! needs_lvalue)
5924 if (! TREE_SIDE_EFFECTS (valist))
5925 return valist;
5927 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5928 TREE_SIDE_EFFECTS (valist) = 1;
5931 if (TREE_SIDE_EFFECTS (valist))
5932 valist = save_expr (valist);
5933 valist = fold_build2_loc (loc, MEM_REF,
5934 vatype, valist, build_int_cst (pt, 0));
5937 return valist;
5940 /* The "standard" definition of va_list is void*. */
5942 tree
5943 std_build_builtin_va_list (void)
5945 return ptr_type_node;
5948 /* The "standard" abi va_list is va_list_type_node. */
5950 tree
5951 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5953 return va_list_type_node;
5956 /* The "standard" type of va_list is va_list_type_node. */
5958 tree
5959 std_canonical_va_list_type (tree type)
5961 tree wtype, htype;
5963 wtype = va_list_type_node;
5964 htype = type;
5966 if (TREE_CODE (wtype) == ARRAY_TYPE)
5968 /* If va_list is an array type, the argument may have decayed
5969 to a pointer type, e.g. by being passed to another function.
5970 In that case, unwrap both types so that we can compare the
5971 underlying records. */
5972 if (TREE_CODE (htype) == ARRAY_TYPE
5973 || POINTER_TYPE_P (htype))
5975 wtype = TREE_TYPE (wtype);
5976 htype = TREE_TYPE (htype);
5979 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5980 return va_list_type_node;
5982 return NULL_TREE;
5985 /* The "standard" implementation of va_start: just assign `nextarg' to
5986 the variable. */
5988 void
5989 std_expand_builtin_va_start (tree valist, rtx nextarg)
5991 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5992 convert_move (va_r, nextarg, 0);
5995 /* Expand EXP, a call to __builtin_va_start. */
5997 static rtx
5998 expand_builtin_va_start (tree exp)
6000 rtx nextarg;
6001 tree valist;
6002 location_t loc = EXPR_LOCATION (exp);
6004 if (call_expr_nargs (exp) < 2)
6006 error_at (loc, "too few arguments to function %<va_start%>");
6007 return const0_rtx;
6010 if (fold_builtin_next_arg (exp, true))
6011 return const0_rtx;
6013 nextarg = expand_builtin_next_arg ();
6014 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
6016 if (targetm.expand_builtin_va_start)
6017 targetm.expand_builtin_va_start (valist, nextarg);
6018 else
6019 std_expand_builtin_va_start (valist, nextarg);
6021 return const0_rtx;
6024 /* Expand EXP, a call to __builtin_va_end. */
6026 static rtx
6027 expand_builtin_va_end (tree exp)
6029 tree valist = CALL_EXPR_ARG (exp, 0);
6031 /* Evaluate for side effects, if needed. I hate macros that don't
6032 do that. */
6033 if (TREE_SIDE_EFFECTS (valist))
6034 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
6036 return const0_rtx;
6039 /* Expand EXP, a call to __builtin_va_copy. We do this as a
6040 builtin rather than just as an assignment in stdarg.h because of the
6041 nastiness of array-type va_list types. */
6043 static rtx
6044 expand_builtin_va_copy (tree exp)
6046 tree dst, src, t;
6047 location_t loc = EXPR_LOCATION (exp);
6049 dst = CALL_EXPR_ARG (exp, 0);
6050 src = CALL_EXPR_ARG (exp, 1);
6052 dst = stabilize_va_list_loc (loc, dst, 1);
6053 src = stabilize_va_list_loc (loc, src, 0);
6055 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
6057 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
6059 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
6060 TREE_SIDE_EFFECTS (t) = 1;
6061 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6063 else
6065 rtx dstb, srcb, size;
6067 /* Evaluate to pointers. */
6068 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
6069 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
6070 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
6071 NULL_RTX, VOIDmode, EXPAND_NORMAL);
6073 dstb = convert_memory_address (Pmode, dstb);
6074 srcb = convert_memory_address (Pmode, srcb);
6076 /* "Dereference" to BLKmode memories. */
6077 dstb = gen_rtx_MEM (BLKmode, dstb);
6078 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
6079 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6080 srcb = gen_rtx_MEM (BLKmode, srcb);
6081 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
6082 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6084 /* Copy. */
6085 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
6088 return const0_rtx;
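
/* Editor's sketch (illustrative only, not part of this file): a typical
   va_copy user.  On targets whose va_list is an array type the copy
   must be the block move emitted above, not a simple assignment.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

static int
vwarn (const char *fmt, va_list ap)
{
  va_list ap2;
  va_copy (ap2, ap);    /* expanded by expand_builtin_va_copy */
  int n = vfprintf (stderr, fmt, ap2);
  va_end (ap2);
  return n;
}
#endif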
6091 /* Expand a call to one of the builtin functions __builtin_frame_address or
6092 __builtin_return_address. */
6094 static rtx
6095 expand_builtin_frame_address (tree fndecl, tree exp)
6097 /* The argument must be a nonnegative integer constant.
6098 It counts the number of frames to scan up the stack.
6099 The value is either the frame pointer value or the return
6100 address saved in that frame. */
6101 if (call_expr_nargs (exp) == 0)
6102 /* Warning about missing arg was already issued. */
6103 return const0_rtx;
6104 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
6106 error ("invalid argument to %qD", fndecl);
6107 return const0_rtx;
6109 else
6111 /* Number of frames to scan up the stack. */
6112 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
6114 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
6116 /* Some ports cannot access arbitrary stack frames. */
6117 if (tem == NULL)
6119 warning (0, "unsupported argument to %qD", fndecl);
6120 return const0_rtx;
6123 if (count)
6125 /* Warn since no effort is made to ensure that any frame
6126 beyond the current one exists or can be safely reached. */
6127 warning (OPT_Wframe_address, "calling %qD with "
6128 "a nonzero argument is unsafe", fndecl);
6131 /* For __builtin_frame_address, return what we've got. */
6132 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6133 return tem;
6135 if (!REG_P (tem)
6136 && ! CONSTANT_P (tem))
6137 tem = copy_addr_to_reg (tem);
6138 return tem;
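
/* Editor's sketch (illustrative only, not part of this file): typical
   uses of the two builtins expanded above.  Only a zero count avoids
   the -Wframe-address warning and is reliably supported.  */
#if 0
static void *
caller_address (void)
{
  return __builtin_return_address (0);
}

static void *
current_frame (void)
{
  return __builtin_frame_address (0);
}
#endif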
6142 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
6143 failed and the caller should emit a normal call. */
6145 static rtx
6146 expand_builtin_alloca (tree exp)
6148 rtx op0;
6149 rtx result;
6150 unsigned int align;
6151 tree fndecl = get_callee_fndecl (exp);
6152 HOST_WIDE_INT max_size;
6153 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6154 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
6155 bool valid_arglist
6156 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6157 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
6158 VOID_TYPE)
6159 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
6160 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
6161 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
6163 if (!valid_arglist)
6164 return NULL_RTX;
6166 if ((alloca_for_var
6167 && warn_vla_limit >= HOST_WIDE_INT_MAX
6168 && warn_alloc_size_limit < warn_vla_limit)
6169 || (!alloca_for_var
6170 && warn_alloca_limit >= HOST_WIDE_INT_MAX
6171 && warn_alloc_size_limit < warn_alloca_limit
6174 /* -Walloca-larger-than and -Wvla-larger-than settings of
6175 less than HOST_WIDE_INT_MAX override the more general
6176 -Walloc-size-larger-than so unless either of the former
6177 options is smaller than the last one (which would imply
6178 that the call was already checked), check the alloca
6179 arguments for overflow. */
6180 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
6181 int idx[] = { 0, -1 };
6182 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
6185 /* Compute the argument. */
6186 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
6188 /* Compute the alignment. */
6189 align = (fcode == BUILT_IN_ALLOCA
6190 ? BIGGEST_ALIGNMENT
6191 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
6193 /* Compute the maximum size. */
6194 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6195 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
6196 : -1);
6198 /* Allocate the desired space. If the allocation stems from the declaration
6199 of a variable-sized object, it cannot accumulate. */
6200 result
6201 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
6202 result = convert_memory_address (ptr_mode, result);
6204 /* Dynamic allocations for variables are recorded during gimplification. */
6205 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
6206 record_dynamic_alloc (exp);
6208 return result;
6211 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
6212 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6213 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
6214 handle_builtin_stack_restore function. */
6216 static rtx
6217 expand_asan_emit_allocas_unpoison (tree exp)
6219 tree arg0 = CALL_EXPR_ARG (exp, 0);
6220 tree arg1 = CALL_EXPR_ARG (exp, 1);
6221 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6222 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6223 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6224 stack_pointer_rtx, NULL_RTX, 0,
6225 OPTAB_LIB_WIDEN);
6226 off = convert_modes (ptr_mode, Pmode, off, 0);
6227 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6228 OPTAB_LIB_WIDEN);
6229 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6230 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6231 top, ptr_mode, bot, ptr_mode);
6232 return ret;
6235 /* Expand a call to bswap builtin in EXP.
6236 Return NULL_RTX if a normal call should be emitted rather than expanding the
6237 function in-line. If convenient, the result should be placed in TARGET.
6238 SUBTARGET may be used as the target for computing one of EXP's operands. */
6240 static rtx
6241 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6242 rtx subtarget)
6244 tree arg;
6245 rtx op0;
6247 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6248 return NULL_RTX;
6250 arg = CALL_EXPR_ARG (exp, 0);
6251 op0 = expand_expr (arg,
6252 subtarget && GET_MODE (subtarget) == target_mode
6253 ? subtarget : NULL_RTX,
6254 target_mode, EXPAND_NORMAL);
6255 if (GET_MODE (op0) != target_mode)
6256 op0 = convert_to_mode (target_mode, op0, 1);
6258 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6260 gcc_assert (target);
6262 return convert_to_mode (target_mode, target, 1);
6265 /* Expand a call to a unary builtin in EXP.
6266 Return NULL_RTX if a normal call should be emitted rather than expanding the
6267 function in-line. If convenient, the result should be placed in TARGET.
6268 SUBTARGET may be used as the target for computing one of EXP's operands. */
6270 static rtx
6271 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6272 rtx subtarget, optab op_optab)
6274 rtx op0;
6276 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6277 return NULL_RTX;
6279 /* Compute the argument. */
6280 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6281 (subtarget
6282 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6283 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6284 VOIDmode, EXPAND_NORMAL);
6285 /* Compute op, into TARGET if possible.
6286 Set TARGET to wherever the result comes back. */
6287 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6288 op_optab, op0, target, op_optab != clrsb_optab);
6289 gcc_assert (target);
6291 return convert_to_mode (target_mode, target, 0);
6294 /* Expand a call to __builtin_expect. We just return our argument
6295 as the builtin_expect semantics should already have been executed by
6296 the tree branch prediction pass. */
6298 static rtx
6299 expand_builtin_expect (tree exp, rtx target)
6301 tree arg;
6303 if (call_expr_nargs (exp) < 2)
6304 return const0_rtx;
6305 arg = CALL_EXPR_ARG (exp, 0);
6307 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6308 /* When guessing was done, the hints should be already stripped away. */
6309 gcc_assert (!flag_guess_branch_prob
6310 || optimize == 0 || seen_error ());
6311 return target;
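
/* Editor's sketch (illustrative only, not part of this file): by the
   time expansion runs, the hint below has already been consumed by the
   branch predictor, so the builtin reduces to its first argument.  */
#if 0
static int
process (int err)
{
  if (__builtin_expect (err, 0))   /* hint: ERR is usually zero */
    return -1;                     /* laid out as the cold path */
  return 0;
}
#endif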
6314 /* Expand a call to __builtin_expect_with_probability. We just return our
6315 argument as the builtin_expect semantics should already have been
6316 executed by the tree branch prediction pass. */
6318 static rtx
6319 expand_builtin_expect_with_probability (tree exp, rtx target)
6321 tree arg;
6323 if (call_expr_nargs (exp) < 3)
6324 return const0_rtx;
6325 arg = CALL_EXPR_ARG (exp, 0);
6327 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6328 /* When guessing was done, the hints should be already stripped away. */
6329 gcc_assert (!flag_guess_branch_prob
6330 || optimize == 0 || seen_error ());
6331 return target;
6335 /* Expand a call to __builtin_assume_aligned. We just return our first
6336 argument as the builtin_assume_aligned semantics should already have
6337 been executed by CCP. */
6339 static rtx
6340 expand_builtin_assume_aligned (tree exp, rtx target)
6342 if (call_expr_nargs (exp) < 2)
6343 return const0_rtx;
6344 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6345 EXPAND_NORMAL);
6346 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6347 && (call_expr_nargs (exp) < 3
6348 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6349 return target;
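
/* Editor's sketch (illustrative only, not part of this file): the
   alignment promise is consumed earlier by CCP, so only the pointer
   argument survives to this point.  */
#if 0
static double
sum2 (const double *p)
{
  const double *q = __builtin_assume_aligned (p, 16);
  return q[0] + q[1];   /* aligned loads may now be used */
}
#endif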
6352 void
6353 expand_builtin_trap (void)
6355 if (targetm.have_trap ())
6357 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6358 /* For trap insns when not accumulating outgoing args force a
6359 REG_ARGS_SIZE note to prevent crossjumping of calls with
6360 different args sizes. */
6361 if (!ACCUMULATE_OUTGOING_ARGS)
6362 add_args_size_note (insn, stack_pointer_delta);
6364 else
6366 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6367 tree call_expr = build_call_expr (fn, 0);
6368 expand_call (call_expr, NULL_RTX, false);
6371 emit_barrier ();
6374 /* Expand a call to __builtin_unreachable. We do nothing except emit
6375 a barrier saying that control flow will not pass here.
6377 It is the responsibility of the program being compiled to ensure
6378 that control flow never reaches __builtin_unreachable. */
6379 static void
6380 expand_builtin_unreachable (void)
6382 emit_barrier ();
6385 /* Expand EXP, a call to fabs, fabsf or fabsl.
6386 Return NULL_RTX if a normal call should be emitted rather than expanding
6387 the function inline. If convenient, the result should be placed
6388 in TARGET. SUBTARGET may be used as the target for computing
6389 the operand. */
6391 static rtx
6392 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6394 machine_mode mode;
6395 tree arg;
6396 rtx op0;
6398 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6399 return NULL_RTX;
6401 arg = CALL_EXPR_ARG (exp, 0);
6402 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6403 mode = TYPE_MODE (TREE_TYPE (arg));
6404 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6405 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6408 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6409 Return NULL if a normal call should be emitted rather than expanding the
6410 function inline. If convenient, the result should be placed in TARGET.
6411 SUBTARGET may be used as the target for computing the operand. */
6413 static rtx
6414 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6416 rtx op0, op1;
6417 tree arg;
6419 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6420 return NULL_RTX;
6422 arg = CALL_EXPR_ARG (exp, 0);
6423 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6425 arg = CALL_EXPR_ARG (exp, 1);
6426 op1 = expand_normal (arg);
6428 return expand_copysign (op0, op1, target);
6431 /* Expand a call to __builtin___clear_cache. */
6433 static rtx
6434 expand_builtin___clear_cache (tree exp)
6436 if (!targetm.code_for_clear_cache)
6438 #ifdef CLEAR_INSN_CACHE
6439 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6440 does something. Just do the default expansion to a call to
6441 __clear_cache(). */
6442 return NULL_RTX;
6443 #else
6444 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6445 does nothing. There is no need to call it. Do nothing. */
6446 return const0_rtx;
6447 #endif /* CLEAR_INSN_CACHE */
6450 /* We have a "clear_cache" insn, and it will handle everything. */
6451 tree begin, end;
6452 rtx begin_rtx, end_rtx;
6454 /* We must not expand to a library call. If we did, any
6455 fallback library function in libgcc that might contain a call to
6456 __builtin___clear_cache() would recurse infinitely. */
6457 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6459 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6460 return const0_rtx;
6463 if (targetm.have_clear_cache ())
6465 class expand_operand ops[2];
6467 begin = CALL_EXPR_ARG (exp, 0);
6468 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6470 end = CALL_EXPR_ARG (exp, 1);
6471 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6473 create_address_operand (&ops[0], begin_rtx);
6474 create_address_operand (&ops[1], end_rtx);
6475 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6476 return const0_rtx;
6478 return const0_rtx;
6481 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6483 static rtx
6484 round_trampoline_addr (rtx tramp)
6486 rtx temp, addend, mask;
6488 /* If we don't need too much alignment, we'll have been guaranteed
6489 proper alignment by get_trampoline_type. */
6490 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6491 return tramp;
6493 /* Round address up to desired boundary. */
6494 temp = gen_reg_rtx (Pmode);
6495 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6496 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6498 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6499 temp, 0, OPTAB_LIB_WIDEN);
6500 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6501 temp, 0, OPTAB_LIB_WIDEN);
6503 return tramp;
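
/* Editor's sketch (illustrative only, not part of this file): the two
   binops above implement the classic align-up idiom for a power-of-two
   boundary A.  */
#if 0
#include <stdint.h>

static uintptr_t
align_up (uintptr_t addr, uintptr_t a)
{
  /* A must be a power of two; e.g. align_up (13, 8) == 16.  */
  return (addr + a - 1) & -a;
}
#endif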
6506 static rtx
6507 expand_builtin_init_trampoline (tree exp, bool onstack)
6509 tree t_tramp, t_func, t_chain;
6510 rtx m_tramp, r_tramp, r_chain, tmp;
6512 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6513 POINTER_TYPE, VOID_TYPE))
6514 return NULL_RTX;
6516 t_tramp = CALL_EXPR_ARG (exp, 0);
6517 t_func = CALL_EXPR_ARG (exp, 1);
6518 t_chain = CALL_EXPR_ARG (exp, 2);
6520 r_tramp = expand_normal (t_tramp);
6521 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6522 MEM_NOTRAP_P (m_tramp) = 1;
6524 /* If ONSTACK, the TRAMP argument should be the address of a field
6525 within the local function's FRAME decl. Either way, let's see if
6526 we can fill in the MEM_ATTRs for this memory. */
6527 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6528 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6530 /* Creator of a heap trampoline is responsible for making sure the
6531 address is aligned to at least STACK_BOUNDARY. Normally malloc
6532 will ensure this anyhow. */
6533 tmp = round_trampoline_addr (r_tramp);
6534 if (tmp != r_tramp)
6536 m_tramp = change_address (m_tramp, BLKmode, tmp);
6537 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6538 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6541 /* The FUNC argument should be the address of the nested function.
6542 Extract the actual function decl to pass to the hook. */
6543 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6544 t_func = TREE_OPERAND (t_func, 0);
6545 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6547 r_chain = expand_normal (t_chain);
6549 /* Generate insns to initialize the trampoline. */
6550 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6552 if (onstack)
6554 trampolines_created = 1;
6556 if (targetm.calls.custom_function_descriptors != 0)
6557 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6558 "trampoline generated for nested function %qD", t_func);
6561 return const0_rtx;
6564 static rtx
6565 expand_builtin_adjust_trampoline (tree exp)
6567 rtx tramp;
6569 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6570 return NULL_RTX;
6572 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6573 tramp = round_trampoline_addr (tramp);
6574 if (targetm.calls.trampoline_adjust_address)
6575 tramp = targetm.calls.trampoline_adjust_address (tramp);
6577 return tramp;
6580 /* Expand a call to the builtin descriptor initialization routine.
6581 A descriptor is made up of a pair of pointers: to the static
6582 chain and to the code entry, in that order. */
6584 static rtx
6585 expand_builtin_init_descriptor (tree exp)
6587 tree t_descr, t_func, t_chain;
6588 rtx m_descr, r_descr, r_func, r_chain;
6590 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6591 VOID_TYPE))
6592 return NULL_RTX;
6594 t_descr = CALL_EXPR_ARG (exp, 0);
6595 t_func = CALL_EXPR_ARG (exp, 1);
6596 t_chain = CALL_EXPR_ARG (exp, 2);
6598 r_descr = expand_normal (t_descr);
6599 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6600 MEM_NOTRAP_P (m_descr) = 1;
6601 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6603 r_func = expand_normal (t_func);
6604 r_chain = expand_normal (t_chain);
6606 /* Generate insns to initialize the descriptor. */
6607 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6608 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6609 POINTER_SIZE / BITS_PER_UNIT), r_func);
6611 return const0_rtx;
6614 /* Expand a call to the builtin descriptor adjustment routine. */
6616 static rtx
6617 expand_builtin_adjust_descriptor (tree exp)
6619 rtx tramp;
6621 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6622 return NULL_RTX;
6624 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6626 /* Unalign the descriptor to allow runtime identification. */
6627 tramp = plus_constant (ptr_mode, tramp,
6628 targetm.calls.custom_function_descriptors);
6630 return force_operand (tramp, NULL_RTX);
6633 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6634 function. The function first checks whether the back end provides
6635 an insn to implement signbit for the respective mode. If not, it
6636 checks whether the floating point format of the value is such that
6637 the sign bit can be extracted. If that is not the case, error out.
6638 EXP is the expression that is a call to the builtin function; if
6639 convenient, the result should be placed in TARGET. */
6640 static rtx
6641 expand_builtin_signbit (tree exp, rtx target)
6643 const struct real_format *fmt;
6644 scalar_float_mode fmode;
6645 scalar_int_mode rmode, imode;
6646 tree arg;
6647 int word, bitpos;
6648 enum insn_code icode;
6649 rtx temp;
6650 location_t loc = EXPR_LOCATION (exp);
6652 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6653 return NULL_RTX;
6655 arg = CALL_EXPR_ARG (exp, 0);
6656 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6657 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6658 fmt = REAL_MODE_FORMAT (fmode);
6660 arg = builtin_save_expr (arg);
6662 /* Expand the argument yielding an RTX expression. */
6663 temp = expand_normal (arg);
6665 /* Check if the back end provides an insn that handles signbit for the
6666 argument's mode. */
6667 icode = optab_handler (signbit_optab, fmode);
6668 if (icode != CODE_FOR_nothing)
6670 rtx_insn *last = get_last_insn ();
6671 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6672 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6673 return target;
6674 delete_insns_since (last);
6677 /* For floating point formats without a sign bit, implement signbit
6678 as "ARG < 0.0". */
6679 bitpos = fmt->signbit_ro;
6680 if (bitpos < 0)
6682 /* But we can't do this if the format supports signed zero. */
6683 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6685 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6686 build_real (TREE_TYPE (arg), dconst0));
6687 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6690 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6692 imode = int_mode_for_mode (fmode).require ();
6693 temp = gen_lowpart (imode, temp);
6695 else
6697 imode = word_mode;
6698 /* Handle targets with different FP word orders. */
6699 if (FLOAT_WORDS_BIG_ENDIAN)
6700 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6701 else
6702 word = bitpos / BITS_PER_WORD;
6703 temp = operand_subword_force (temp, word, fmode);
6704 bitpos = bitpos % BITS_PER_WORD;
6707 /* Force the intermediate word_mode (or narrower) result into a
6708 register. This avoids attempting to create paradoxical SUBREGs
6709 of floating point modes below. */
6710 temp = force_reg (imode, temp);
6712 /* If the bitpos is within the "result mode" lowpart, the operation
6713 can be implemented with a single bitwise AND. Otherwise, we need
6714 a right shift and an AND. */
6716 if (bitpos < GET_MODE_BITSIZE (rmode))
6718 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6720 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6721 temp = gen_lowpart (rmode, temp);
6722 temp = expand_binop (rmode, and_optab, temp,
6723 immed_wide_int_const (mask, rmode),
6724 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6726 else
6728 /* Perform a logical right shift to place the signbit in the least
6729 significant bit, then truncate the result to the desired mode
6730 and mask just this bit. */
6731 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6732 temp = gen_lowpart (rmode, temp);
6733 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6734 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6737 return temp;
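
/* Editor's sketch (illustrative only, not part of this file): for IEEE
   single precision the shift-and-mask fallback above amounts to testing
   bit 31 of the representation.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbit_float (float x)
{
  uint32_t u;
  memcpy (&u, &x, sizeof u);   /* well-defined type pun */
  return (u >> 31) & 1;        /* 1 for negatives and for -0.0f */
}
#endif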
6740 /* Expand fork or exec calls. TARGET is the desired target of the
6741 call. EXP is the call. FN is the
6742 identificator of the actual function. IGNORE is nonzero if the
6743 value is to be ignored. */
6745 static rtx
6746 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6748 tree id, decl;
6749 tree call;
6751 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6753 tree path = CALL_EXPR_ARG (exp, 0);
6754 /* Detect unterminated path. */
6755 if (!check_read_access (exp, path))
6756 return NULL_RTX;
6758 /* Also detect unterminated first argument. */
6759 switch (DECL_FUNCTION_CODE (fn))
6761 case BUILT_IN_EXECL:
6762 case BUILT_IN_EXECLE:
6763 case BUILT_IN_EXECLP:
6764 if (!check_read_access (exp, path))
6765 return NULL_RTX;
6766 default:
6767 break;
6772 /* If we are not profiling, just call the function. */
6773 if (!profile_arc_flag)
6774 return NULL_RTX;
6776 /* Otherwise call the wrapper. This should be equivalent for the rest of
6777 the compiler, so the code does not diverge, and the wrapper may run the
6778 code necessary for keeping the profiling sane. */
6780 switch (DECL_FUNCTION_CODE (fn))
6782 case BUILT_IN_FORK:
6783 id = get_identifier ("__gcov_fork");
6784 break;
6786 case BUILT_IN_EXECL:
6787 id = get_identifier ("__gcov_execl");
6788 break;
6790 case BUILT_IN_EXECV:
6791 id = get_identifier ("__gcov_execv");
6792 break;
6794 case BUILT_IN_EXECLP:
6795 id = get_identifier ("__gcov_execlp");
6796 break;
6798 case BUILT_IN_EXECLE:
6799 id = get_identifier ("__gcov_execle");
6800 break;
6802 case BUILT_IN_EXECVP:
6803 id = get_identifier ("__gcov_execvp");
6804 break;
6806 case BUILT_IN_EXECVE:
6807 id = get_identifier ("__gcov_execve");
6808 break;
6810 default:
6811 gcc_unreachable ();
6814 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6815 FUNCTION_DECL, id, TREE_TYPE (fn));
6816 DECL_EXTERNAL (decl) = 1;
6817 TREE_PUBLIC (decl) = 1;
6818 DECL_ARTIFICIAL (decl) = 1;
6819 TREE_NOTHROW (decl) = 1;
6820 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6821 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6822 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6823 return expand_call (call, target, ignore);
6828 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6829 the pointer in these functions is void*, the tree optimizers may remove
6830 casts. The mode computed in expand_builtin isn't reliable either, due
6831 to __sync_bool_compare_and_swap.
6833 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6834 group of builtins. This gives us log2 of the mode size. */
6836 static inline machine_mode
6837 get_builtin_sync_mode (int fcode_diff)
6839 /* The size is not negotiable, so ask not to get BLKmode in return
6840 if the target indicates that a smaller size would be better. */
6841 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
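
/* Editor's sketch (illustrative only, not part of this file): the
   mapping performed above, in plain arithmetic.  FCODE_DIFF 0 selects
   the 8-bit mode (QImode), 1 the 16-bit, 2 the 32-bit, 3 the 64-bit
   and 4 the 128-bit mode.  */
#if 0
static unsigned
sync_mode_bits (int fcode_diff)
{
  return 8u << fcode_diff;   /* BITS_PER_UNIT << fcode_diff */
}
#endif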
6844 /* Expand the memory expression LOC and return the appropriate memory operand
6845 for the builtin_sync operations. */
6847 static rtx
6848 get_builtin_sync_mem (tree loc, machine_mode mode)
6850 rtx addr, mem;
6851 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6852 ? TREE_TYPE (TREE_TYPE (loc))
6853 : TREE_TYPE (loc));
6854 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6856 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6857 addr = convert_memory_address (addr_mode, addr);
6859 /* Note that we explicitly do not want any alias information for this
6860 memory, so that we kill all other live memories. Otherwise we don't
6861 satisfy the full barrier semantics of the intrinsic. */
6862 mem = gen_rtx_MEM (mode, addr);
6864 set_mem_addr_space (mem, addr_space);
6866 mem = validize_mem (mem);
6868 /* The alignment needs to be at least that of the mode. */
6869 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6870 get_pointer_alignment (loc)));
6871 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6872 MEM_VOLATILE_P (mem) = 1;
6874 return mem;
6877 /* Make sure an argument is in the right mode.
6878 EXP is the tree argument.
6879 MODE is the mode it should be in. */
6881 static rtx
6882 expand_expr_force_mode (tree exp, machine_mode mode)
6884 rtx val;
6885 machine_mode old_mode;
6887 if (TREE_CODE (exp) == SSA_NAME
6888 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6890 /* Undo argument promotion if possible, as combine might not
6891 be able to do it later due to MEM_VOLATILE_P uses in the
6892 patterns. */
6893 gimple *g = get_gimple_for_ssa_name (exp);
6894 if (g && gimple_assign_cast_p (g))
6896 tree rhs = gimple_assign_rhs1 (g);
6897 tree_code code = gimple_assign_rhs_code (g);
6898 if (CONVERT_EXPR_CODE_P (code)
6899 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6900 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6901 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6902 && (TYPE_PRECISION (TREE_TYPE (exp))
6903 > TYPE_PRECISION (TREE_TYPE (rhs))))
6904 exp = rhs;
6908 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6909 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6910 of CONST_INTs, where we know the old_mode only from the call argument. */
6912 old_mode = GET_MODE (val);
6913 if (old_mode == VOIDmode)
6914 old_mode = TYPE_MODE (TREE_TYPE (exp));
6915 val = convert_modes (mode, old_mode, val, 1);
6916 return val;
6920 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6921 EXP is the CALL_EXPR. CODE is the rtx code
6922 that corresponds to the arithmetic or logical operation from the name;
6923 an exception here is that NOT actually means NAND. TARGET is an optional
6924 place for us to store the results; AFTER is true if this is the
6925 fetch_and_xxx form. */
6927 static rtx
6928 expand_builtin_sync_operation (machine_mode mode, tree exp,
6929 enum rtx_code code, bool after,
6930 rtx target)
6932 rtx val, mem;
6933 location_t loc = EXPR_LOCATION (exp);
6935 if (code == NOT && warn_sync_nand)
6937 tree fndecl = get_callee_fndecl (exp);
6938 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6940 static bool warned_f_a_n, warned_n_a_f;
6942 switch (fcode)
6944 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6945 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6946 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6947 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6948 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6949 if (warned_f_a_n)
6950 break;
6952 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6953 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6954 warned_f_a_n = true;
6955 break;
6957 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6958 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6959 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6960 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6961 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6962 if (warned_n_a_f)
6963 break;
6965 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6966 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6967 warned_n_a_f = true;
6968 break;
6970 default:
6971 gcc_unreachable ();
6975 /* Expand the operands. */
6976 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6977 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6979 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6980 after);
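
/* Editor's sketch (illustrative only, not part of this file): the
   "changed semantics in GCC 4.4" warning above concerns NAND.  Since
   4.4 the builtins compute the C model below; earlier releases
   computed ~old & val instead.  */
#if 0
static unsigned
sync_nand_and_fetch_model (unsigned *p, unsigned v)
{
  unsigned old = *p;
  *p = ~(old & v);   /* full bitwise NAND of the old value */
  return *p;         /* the fetch_and_nand form returns OLD */
}
#endif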
6983 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6984 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6985 true if this is the boolean form. TARGET is a place for us to store the
6986 results; this is NOT optional if IS_BOOL is true. */
6988 static rtx
6989 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6990 bool is_bool, rtx target)
6992 rtx old_val, new_val, mem;
6993 rtx *pbool, *poval;
6995 /* Expand the operands. */
6996 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6997 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6998 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7000 pbool = poval = NULL;
7001 if (target != const0_rtx)
7003 if (is_bool)
7004 pbool = &target;
7005 else
7006 poval = &target;
7008 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
7009 false, MEMMODEL_SYNC_SEQ_CST,
7010 MEMMODEL_SYNC_SEQ_CST))
7011 return NULL_RTX;
7013 return target;
7016 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
7017 general form is actually an atomic exchange, and some targets only
7018 support a reduced form with the second argument being a constant 1.
7019 EXP is the CALL_EXPR; TARGET is an optional place for us to store
7020 the results. */
7022 static rtx
7023 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
7024 rtx target)
7026 rtx val, mem;
7028 /* Expand the operands. */
7029 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7030 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7032 return expand_sync_lock_test_and_set (target, mem, val);
7035 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
7037 static void
7038 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
7040 rtx mem;
7042 /* Expand the operands. */
7043 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7045 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
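
/* Editor's sketch (illustrative only, not part of this file): the
   canonical pairing of the two intrinsics expanded above is a simple
   spinlock.  */
#if 0
static volatile int lock;

static void
acquire (void)
{
  while (__sync_lock_test_and_set (&lock, 1))   /* acquire barrier */
    ;                                           /* spin until 0 seen */
}

static void
release (void)
{
  __sync_lock_release (&lock);   /* stores 0 with release semantics */
}
#endif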
7048 /* Given an integer representing an ``enum memmodel'', verify its
7049 correctness and return the memory model enum. */
7051 static enum memmodel
7052 get_memmodel (tree exp)
7054 rtx op;
7055 unsigned HOST_WIDE_INT val;
7056 location_t loc
7057 = expansion_point_location_if_in_system_header (input_location);
7059 /* If the parameter is not a constant, it's a run-time value, so we'll just
7060 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
7061 if (TREE_CODE (exp) != INTEGER_CST)
7062 return MEMMODEL_SEQ_CST;
7064 op = expand_normal (exp);
7066 val = INTVAL (op);
7067 if (targetm.memmodel_check)
7068 val = targetm.memmodel_check (val);
7069 else if (val & ~MEMMODEL_MASK)
7071 warning_at (loc, OPT_Winvalid_memory_model,
7072 "unknown architecture specifier in memory model to builtin");
7073 return MEMMODEL_SEQ_CST;
7076 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
7077 if (memmodel_base (val) >= MEMMODEL_LAST)
7079 warning_at (loc, OPT_Winvalid_memory_model,
7080 "invalid memory model argument to builtin");
7081 return MEMMODEL_SEQ_CST;
7084 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
7085 be conservative and promote consume to acquire. */
7086 if (val == MEMMODEL_CONSUME)
7087 val = MEMMODEL_ACQUIRE;
7089 return (enum memmodel) val;
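/* Illustrative example (not from the GCC sources): memory model arguments
   as classified by get_memmodel above.  RUNTIME_ORDER is a hypothetical
   non-constant int.

     x = __atomic_load_n (p, __ATOMIC_ACQUIRE);   constant, used as given
     x = __atomic_load_n (p, __ATOMIC_CONSUME);   promoted to ACQUIRE
     x = __atomic_load_n (p, runtime_order);      not an INTEGER_CST, so
                                                  treated as SEQ_CST  */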
7092 /* Expand the __atomic_exchange intrinsic:
7093 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
7094 EXP is the CALL_EXPR.
7095 TARGET is an optional place for us to store the results. */
7097 static rtx
7098 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
7100 rtx val, mem;
7101 enum memmodel model;
7103 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7105 if (!flag_inline_atomics)
7106 return NULL_RTX;
7108 /* Expand the operands. */
7109 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7110 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7112 return expand_atomic_exchange (target, mem, val, model);
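/* Illustrative example (not from the GCC sources):

     int old = __atomic_exchange_n (&flag, 1, __ATOMIC_ACQ_REL);

   FLAG is hypothetical.  With the default -finline-atomics this goes
   through expand_atomic_exchange above; with -fno-inline-atomics the
   NULL_RTX return forces a libatomic call instead.  */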
7115 /* Expand the __atomic_compare_exchange intrinsic:
7116 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
7117 TYPE desired, BOOL weak,
7118 enum memmodel success,
7119 enum memmodel failure)
7120 EXP is the CALL_EXPR.
7121 TARGET is an optional place for us to store the results. */
7123 static rtx
7124 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
7125 rtx target)
7127 rtx expect, desired, mem, oldval;
7128 rtx_code_label *label;
7129 enum memmodel success, failure;
7130 tree weak;
7131 bool is_weak;
7132 location_t loc
7133 = expansion_point_location_if_in_system_header (input_location);
7135 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
7136 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
7138 if (failure > success)
7140 warning_at (loc, OPT_Winvalid_memory_model,
7141 "failure memory model cannot be stronger than success "
7142 "memory model for %<__atomic_compare_exchange%>");
7143 success = MEMMODEL_SEQ_CST;
7146 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7148 warning_at (loc, OPT_Winvalid_memory_model,
7149 "invalid failure memory model for "
7150 "%<__atomic_compare_exchange%>");
7151 failure = MEMMODEL_SEQ_CST;
7152 success = MEMMODEL_SEQ_CST;
7156 if (!flag_inline_atomics)
7157 return NULL_RTX;
7159 /* Expand the operands. */
7160 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7162 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
7163 expect = convert_memory_address (Pmode, expect);
7164 expect = gen_rtx_MEM (mode, expect);
7165 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7167 weak = CALL_EXPR_ARG (exp, 3);
7168 is_weak = false;
7169 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
7170 is_weak = true;
7172 if (target == const0_rtx)
7173 target = NULL;
7175 /* Lest the rtl backend create a race condition with an improper store
7176 to memory, always create a new pseudo for OLDVAL. */
7177 oldval = NULL;
7179 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
7180 is_weak, success, failure))
7181 return NULL_RTX;
7183 /* Conditionally store back to EXPECT, lest we create a race condition
7184 with an improper store to memory. */
7185 /* ??? With a rearrangement of atomics at the gimple level, we can handle
7186 the normal case where EXPECT is totally private, i.e. a register. At
7187 which point the store can be unconditional. */
7188 label = gen_label_rtx ();
7189 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
7190 GET_MODE (target), 1, label);
7191 emit_move_insn (expect, oldval);
7192 emit_label (label);
7194 return target;
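/* Illustrative example (not from the GCC sources): a typical strong
   compare-exchange loop.  P is a hypothetical int pointer.

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   On failure EXPECTED has been refreshed from *P; the conditional store
   back to EXPECT emitted above is exactly what makes that visible.  */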
7197 /* Helper function for expand_ifn_atomic_compare_exchange - expand
7198 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
7199 call. The weak parameter must be dropped to match the expected parameter
7200 list and the expected argument changed from value to pointer to memory
7201 slot. */
7203 static void
7204 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
7206 unsigned int z;
7207 vec<tree, va_gc> *vec;
7209 vec_alloc (vec, 5);
7210 vec->quick_push (gimple_call_arg (call, 0));
7211 tree expected = gimple_call_arg (call, 1);
7212 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
7213 TREE_TYPE (expected));
7214 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
7215 if (expd != x)
7216 emit_move_insn (x, expd);
7217 tree v = make_tree (TREE_TYPE (expected), x);
7218 vec->quick_push (build1 (ADDR_EXPR,
7219 build_pointer_type (TREE_TYPE (expected)), v));
7220 vec->quick_push (gimple_call_arg (call, 2));
7221 /* Skip the boolean weak parameter. */
7222 for (z = 4; z < 6; z++)
7223 vec->quick_push (gimple_call_arg (call, z));
7224 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7225 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
7226 gcc_assert (bytes_log2 < 5);
7227 built_in_function fncode
7228 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7229 + bytes_log2);
7230 tree fndecl = builtin_decl_explicit (fncode);
7231 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
7232 fndecl);
7233 tree exp = build_call_vec (boolean_type_node, fn, vec);
7234 tree lhs = gimple_call_lhs (call);
7235 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7236 if (lhs)
7238 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7239 if (GET_MODE (boolret) != mode)
7240 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7241 x = force_reg (mode, x);
7242 write_complex_part (target, boolret, true);
7243 write_complex_part (target, x, false);
7247 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7249 void
7250 expand_ifn_atomic_compare_exchange (gcall *call)
7252 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7253 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
7254 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7255 rtx expect, desired, mem, oldval, boolret;
7256 enum memmodel success, failure;
7257 tree lhs;
7258 bool is_weak;
7259 location_t loc
7260 = expansion_point_location_if_in_system_header (gimple_location (call));
7262 success = get_memmodel (gimple_call_arg (call, 4));
7263 failure = get_memmodel (gimple_call_arg (call, 5));
7265 if (failure > success)
7267 warning_at (loc, OPT_Winvalid_memory_model,
7268 "failure memory model cannot be stronger than success "
7269 "memory model for %<__atomic_compare_exchange%>");
7270 success = MEMMODEL_SEQ_CST;
7273 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7275 warning_at (loc, OPT_Winvalid_memory_model,
7276 "invalid failure memory model for "
7277 "%<__atomic_compare_exchange%>");
7278 failure = MEMMODEL_SEQ_CST;
7279 success = MEMMODEL_SEQ_CST;
7282 if (!flag_inline_atomics)
7284 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7285 return;
7288 /* Expand the operands. */
7289 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7291 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7292 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7294 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7296 boolret = NULL;
7297 oldval = NULL;
7299 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7300 is_weak, success, failure))
7302 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7303 return;
7306 lhs = gimple_call_lhs (call);
7307 if (lhs)
7309 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7310 if (GET_MODE (boolret) != mode)
7311 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7312 write_complex_part (target, boolret, true);
7313 write_complex_part (target, oldval, false);
7317 /* Expand the __atomic_load intrinsic:
7318 TYPE __atomic_load (TYPE *object, enum memmodel)
7319 EXP is the CALL_EXPR.
7320 TARGET is an optional place for us to store the results. */
7322 static rtx
7323 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7325 rtx mem;
7326 enum memmodel model;
7328 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7329 if (is_mm_release (model) || is_mm_acq_rel (model))
7331 location_t loc
7332 = expansion_point_location_if_in_system_header (input_location);
7333 warning_at (loc, OPT_Winvalid_memory_model,
7334 "invalid memory model for %<__atomic_load%>");
7335 model = MEMMODEL_SEQ_CST;
7338 if (!flag_inline_atomics)
7339 return NULL_RTX;
7341 /* Expand the operand. */
7342 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7344 return expand_atomic_load (target, mem, model);
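/* Illustrative example (not from the GCC sources): a release (or
   acquire-release) order is meaningless for a pure load.

     v = __atomic_load_n (p, __ATOMIC_ACQUIRE);   accepted
     v = __atomic_load_n (p, __ATOMIC_RELEASE);   -Winvalid-memory-model,
                                                  expanded as SEQ_CST  */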
7348 /* Expand the __atomic_store intrinsic:
7349 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7350 EXP is the CALL_EXPR.
7351 TARGET is an optional place for us to store the results. */
7353 static rtx
7354 expand_builtin_atomic_store (machine_mode mode, tree exp)
7356 rtx mem, val;
7357 enum memmodel model;
7359 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7360 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7361 || is_mm_release (model)))
7363 location_t loc
7364 = expansion_point_location_if_in_system_header (input_location);
7365 warning_at (loc, OPT_Winvalid_memory_model,
7366 "invalid memory model for %<__atomic_store%>");
7367 model = MEMMODEL_SEQ_CST;
7370 if (!flag_inline_atomics)
7371 return NULL_RTX;
7373 /* Expand the operands. */
7374 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7375 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7377 return expand_atomic_store (mem, val, model, false);
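/* Illustrative example (not from the GCC sources): only relaxed, release
   and seq-cst orders make sense for a pure store.

     __atomic_store_n (p, 0, __ATOMIC_RELEASE);   accepted
     __atomic_store_n (p, 0, __ATOMIC_ACQUIRE);   -Winvalid-memory-model,
                                                  expanded as SEQ_CST  */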
7380 /* Expand the __atomic_fetch_XXX intrinsic:
7381 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7382 EXP is the CALL_EXPR.
7383 TARGET is an optional place for us to store the results.
7384 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT.
7385 FETCH_AFTER is true if the result of the operation is returned,
7386 and false if the value before the operation is returned.
7387 IGNORE is true if the result is not used.
7388 EXT_CALL is the correct builtin for an external call if this cannot be
7389 resolved to an instruction sequence. */
7391 static rtx
7392 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7393 enum rtx_code code, bool fetch_after,
7394 bool ignore, enum built_in_function ext_call)
7396 rtx val, mem, ret;
7397 enum memmodel model;
7398 tree fndecl;
7399 tree addr;
7401 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7403 /* Expand the operands. */
7404 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7405 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7407 /* Only try generating instructions if inlining is turned on. */
7408 if (flag_inline_atomics)
7410 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7411 if (ret)
7412 return ret;
7415 /* Return if a different routine isn't needed for the library call. */
7416 if (ext_call == BUILT_IN_NONE)
7417 return NULL_RTX;
7419 /* Change the call to the specified function. */
7420 fndecl = get_callee_fndecl (exp);
7421 addr = CALL_EXPR_FN (exp);
7422 STRIP_NOPS (addr);
7424 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7425 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7427 /* If we will emit code after the call, the call cannot be a tail call.
7428 If it is emitted as a tail call, a barrier is emitted after it, and
7429 then all trailing code is removed. */
7430 if (!ignore)
7431 CALL_EXPR_TAILCALL (exp) = 0;
7433 /* Expand the call here so we can emit trailing code. */
7434 ret = expand_call (exp, target, ignore);
7436 /* Replace the original function just in case it matters. */
7437 TREE_OPERAND (addr, 0) = fndecl;
7439 /* Then issue the arithmetic correction to return the right result. */
7440 if (!ignore)
7442 if (code == NOT)
7444 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7445 OPTAB_LIB_WIDEN);
7446 ret = expand_simple_unop (mode, NOT, ret, target, true);
7448 else
7449 ret = expand_simple_binop (mode, code, ret, val, target, true,
7450 OPTAB_LIB_WIDEN);
7452 return ret;
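/* Illustrative note (not from the GCC sources): the arithmetic correction
   above relies on identities such as

     __atomic_add_fetch (p, n, m)  ==  __atomic_fetch_add (p, n, m) + n
     __atomic_nand_fetch (p, n, m) ==  ~(__atomic_fetch_nand (p, n, m) & n)

   so when only the fetch-before library routine is available, the
   fetch-after result is recovered by re-applying CODE (or, for NOT,
   AND followed by NOT) to the returned value.  */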
7455 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7457 void
7458 expand_ifn_atomic_bit_test_and (gcall *call)
7460 tree ptr = gimple_call_arg (call, 0);
7461 tree bit = gimple_call_arg (call, 1);
7462 tree flag = gimple_call_arg (call, 2);
7463 tree lhs = gimple_call_lhs (call);
7464 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7465 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7466 enum rtx_code code;
7467 optab optab;
7468 class expand_operand ops[5];
7470 gcc_assert (flag_inline_atomics);
7472 if (gimple_call_num_args (call) == 4)
7473 model = get_memmodel (gimple_call_arg (call, 3));
7475 rtx mem = get_builtin_sync_mem (ptr, mode);
7476 rtx val = expand_expr_force_mode (bit, mode);
7478 switch (gimple_call_internal_fn (call))
7480 case IFN_ATOMIC_BIT_TEST_AND_SET:
7481 code = IOR;
7482 optab = atomic_bit_test_and_set_optab;
7483 break;
7484 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7485 code = XOR;
7486 optab = atomic_bit_test_and_complement_optab;
7487 break;
7488 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7489 code = AND;
7490 optab = atomic_bit_test_and_reset_optab;
7491 break;
7492 default:
7493 gcc_unreachable ();
7496 if (lhs == NULL_TREE)
7498 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7499 val, NULL_RTX, true, OPTAB_DIRECT);
7500 if (code == AND)
7501 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7502 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7503 return;
7506 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7507 enum insn_code icode = direct_optab_handler (optab, mode);
7508 gcc_assert (icode != CODE_FOR_nothing);
7509 create_output_operand (&ops[0], target, mode);
7510 create_fixed_operand (&ops[1], mem);
7511 create_convert_operand_to (&ops[2], val, mode, true);
7512 create_integer_operand (&ops[3], model);
7513 create_integer_operand (&ops[4], integer_onep (flag));
7514 if (maybe_expand_insn (icode, 5, ops))
7515 return;
7517 rtx bitval = val;
7518 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7519 val, NULL_RTX, true, OPTAB_DIRECT);
7520 rtx maskval = val;
7521 if (code == AND)
7522 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7523 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7524 code, model, false);
7525 if (integer_onep (flag))
7527 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7528 NULL_RTX, true, OPTAB_DIRECT);
7529 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7530 true, OPTAB_DIRECT);
7532 else
7533 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7534 OPTAB_DIRECT);
7535 if (result != target)
7536 emit_move_insn (target, result);
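/* Illustrative example (not from the GCC sources): the source-level
   pattern that gets matched into IFN_ATOMIC_BIT_TEST_AND_SET at the
   gimple level and expanded above.  WORD, BIT and HANDLE_BIT_WAS_SET
   are hypothetical.

     if (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
         & (1u << bit))
       handle_bit_was_set ();

   Targets with a suitable instruction (e.g. a locked bit-test-and-set)
   can produce the tested bit directly; otherwise the fallback path above
   re-creates the mask and tests the fetch result.  */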
7539 /* Expand an atomic clear operation.
7540 void __atomic_clear (BOOL *obj, enum memmodel)
7541 EXP is the call expression. */
7543 static rtx
7544 expand_builtin_atomic_clear (tree exp)
7546 machine_mode mode;
7547 rtx mem, ret;
7548 enum memmodel model;
7550 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7551 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7552 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7554 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7556 location_t loc
7557 = expansion_point_location_if_in_system_header (input_location);
7558 warning_at (loc, OPT_Winvalid_memory_model,
7559 "invalid memory model for %<__atomic_store%>");
7560 model = MEMMODEL_SEQ_CST;
7563 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7564 Failing that, a plain store is issued below. The only way this can
7565 fail is if the bool type is larger than a word size. Unlikely, but
7566 handle it anyway for completeness. Assume a single threaded model since
7567 there is no atomic support in this case, and no barriers are required. */
7568 ret = expand_atomic_store (mem, const0_rtx, model, true);
7569 if (!ret)
7570 emit_move_insn (mem, const0_rtx);
7571 return const0_rtx;
7574 /* Expand an atomic test_and_set operation.
7575 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7576 EXP is the call expression. */
7578 static rtx
7579 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7581 rtx mem;
7582 enum memmodel model;
7583 machine_mode mode;
7585 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7586 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7587 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7589 return expand_atomic_test_and_set (target, mem, model);
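/* Illustrative example (not from the GCC sources): the flag idiom served
   by the two expanders above.  EXAMPLE_FLAG and DO_GUARDED_WORK are
   hypothetical.

     static _Bool example_flag;

     while (__atomic_test_and_set (&example_flag, __ATOMIC_ACQUIRE))
       ;
     do_guarded_work ();
     __atomic_clear (&example_flag, __ATOMIC_RELEASE);  */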
7593 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7594 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7596 static tree
7597 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7599 int size;
7600 machine_mode mode;
7601 unsigned int mode_align, type_align;
7603 if (TREE_CODE (arg0) != INTEGER_CST)
7604 return NULL_TREE;
7606 /* We need a corresponding integer mode for the access to be lock-free. */
7607 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7608 if (!int_mode_for_size (size, 0).exists (&mode))
7609 return boolean_false_node;
7611 mode_align = GET_MODE_ALIGNMENT (mode);
7613 if (TREE_CODE (arg1) == INTEGER_CST)
7615 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7617 /* Either this argument is null, or it's a fake pointer encoding
7618 the alignment of the object. */
7619 val = least_bit_hwi (val);
7620 val *= BITS_PER_UNIT;
7622 if (val == 0 || mode_align < val)
7623 type_align = mode_align;
7624 else
7625 type_align = val;
7627 else
7629 tree ttype = TREE_TYPE (arg1);
7631 /* This function is usually invoked and folded immediately by the front
7632 end before anything else has a chance to look at it. The pointer
7633 parameter at this point is usually cast to a void *, so check for that
7634 and look past the cast. */
7635 if (CONVERT_EXPR_P (arg1)
7636 && POINTER_TYPE_P (ttype)
7637 && VOID_TYPE_P (TREE_TYPE (ttype))
7638 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7639 arg1 = TREE_OPERAND (arg1, 0);
7641 ttype = TREE_TYPE (arg1);
7642 gcc_assert (POINTER_TYPE_P (ttype));
7644 /* Get the underlying type of the object. */
7645 ttype = TREE_TYPE (ttype);
7646 type_align = TYPE_ALIGN (ttype);
7649 /* If the object has smaller alignment, the lock free routines cannot
7650 be used. */
7651 if (type_align < mode_align)
7652 return boolean_false_node;
7654 /* Check if a compare_and_swap pattern exists for the mode which represents
7655 the required size. The pattern is not allowed to fail, so the existence
7656 of the pattern indicates support is present. Also require that an
7657 atomic load exists for the required size. */
7658 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7659 return boolean_true_node;
7660 else
7661 return boolean_false_node;
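/* Illustrative example (not from the GCC sources): typical foldings on a
   target with naturally aligned, lock-free 4-byte atomics.  I is an int
   and BUF a char array, both hypothetical.

     __atomic_always_lock_free (4, 0);    true, typical alignment assumed
     __atomic_always_lock_free (4, &i);   true, int is suitably aligned
     __atomic_always_lock_free (4, buf);  false, char alignment is below
                                          the mode alignment  */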
7664 /* Return true if the parameters to call EXP represent an object which will
7665 always generate lock free instructions. The first argument represents the
7666 size of the object, and the second parameter is a pointer to the object
7667 itself. If NULL is passed for the object, then the result is based on
7668 typical alignment for an object of the specified size. Otherwise return
7669 false. */
7671 static rtx
7672 expand_builtin_atomic_always_lock_free (tree exp)
7674 tree size;
7675 tree arg0 = CALL_EXPR_ARG (exp, 0);
7676 tree arg1 = CALL_EXPR_ARG (exp, 1);
7678 if (TREE_CODE (arg0) != INTEGER_CST)
7680 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7681 return const0_rtx;
7684 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7685 if (size == boolean_true_node)
7686 return const1_rtx;
7687 return const0_rtx;
7690 /* Return boolean_true_node if it can be determined that the object ARG1 of
7691 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
7693 static tree
7694 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7696 if (!flag_inline_atomics)
7697 return NULL_TREE;
7699 /* If it isn't always lock free, don't generate a result. */
7700 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7701 return boolean_true_node;
7703 return NULL_TREE;
7706 /* Return true if the parameters to call EXP represent an object which will
7707 always generate lock free instructions. The first argument represents the
7708 size of the object, and the second parameter is a pointer to the object
7709 itself. If NULL is passed for the object, then the result is based on
7710 typical alignment for an object of the specified size. Otherwise return
7711 NULL.  */
7713 static rtx
7714 expand_builtin_atomic_is_lock_free (tree exp)
7716 tree size;
7717 tree arg0 = CALL_EXPR_ARG (exp, 0);
7718 tree arg1 = CALL_EXPR_ARG (exp, 1);
7720 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7722 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7723 return NULL_RTX;
7726 if (!flag_inline_atomics)
7727 return NULL_RTX;
7729 /* If the value is known at compile time, return the RTX for it. */
7730 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7731 if (size == boolean_true_node)
7732 return const1_rtx;
7734 return NULL_RTX;
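/* Illustrative note (not from the GCC sources): the difference between
   the two queries.  __atomic_always_lock_free folds to a compile-time
   constant or to false, while __atomic_is_lock_free folds to true only
   in the always-lock-free case and otherwise becomes a call to the
   libatomic routine, which may still answer true at run time.  */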
7737 /* Expand the __atomic_thread_fence intrinsic:
7738 void __atomic_thread_fence (enum memmodel)
7739 EXP is the CALL_EXPR. */
7741 static void
7742 expand_builtin_atomic_thread_fence (tree exp)
7744 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7745 expand_mem_thread_fence (model);
7748 /* Expand the __atomic_signal_fence intrinsic:
7749 void __atomic_signal_fence (enum memmodel)
7750 EXP is the CALL_EXPR. */
7752 static void
7753 expand_builtin_atomic_signal_fence (tree exp)
7755 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7756 expand_mem_signal_fence (model);
7759 /* Expand the __sync_synchronize intrinsic. */
7761 static void
7762 expand_builtin_sync_synchronize (void)
7764 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
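/* Illustrative example (not from the GCC sources):

     __atomic_thread_fence (__ATOMIC_RELEASE);   inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   compiler-level barrier
                                                 against a handler in the
                                                 same thread
     __sync_synchronize ();                      full SEQ_CST fence  */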
7767 static rtx
7768 expand_builtin_thread_pointer (tree exp, rtx target)
7770 enum insn_code icode;
7771 if (!validate_arglist (exp, VOID_TYPE))
7772 return const0_rtx;
7773 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7774 if (icode != CODE_FOR_nothing)
7776 class expand_operand op;
7777 /* If the target is not suitable then create a new target. */
7778 if (target == NULL_RTX
7779 || !REG_P (target)
7780 || GET_MODE (target) != Pmode)
7781 target = gen_reg_rtx (Pmode);
7782 create_output_operand (&op, target, Pmode);
7783 expand_insn (icode, 1, &op);
7784 return target;
7786 error ("%<__builtin_thread_pointer%> is not supported on this target");
7787 return const0_rtx;
7790 static void
7791 expand_builtin_set_thread_pointer (tree exp)
7793 enum insn_code icode;
7794 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7795 return;
7796 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7797 if (icode != CODE_FOR_nothing)
7799 class expand_operand op;
7800 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7801 Pmode, EXPAND_NORMAL);
7802 create_input_operand (&op, val, Pmode);
7803 expand_insn (icode, 1, &op);
7804 return;
7806 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
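/* Illustrative example (not from the GCC sources): on a target with a
   thread-pointer register (e.g. TPIDR_EL0 on AArch64)

     void *tcb = __builtin_thread_pointer ();

   expands to a single register read; targets without the optab get the
   error above.  */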
7810 /* Emit code to restore the saved value of the stack pointer. */
7812 static void
7813 expand_stack_restore (tree var)
7815 rtx_insn *prev;
7816 rtx sa = expand_normal (var);
7818 sa = convert_memory_address (Pmode, sa);
7820 prev = get_last_insn ();
7821 emit_stack_restore (SAVE_BLOCK, sa);
7823 record_new_stack_level ();
7825 fixup_args_size_notes (prev, get_last_insn (), 0);
7828 /* Emit code to save the current value of the stack pointer. */
7830 static rtx
7831 expand_stack_save (void)
7833 rtx ret = NULL_RTX;
7835 emit_stack_save (SAVE_BLOCK, &ret);
7836 return ret;
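/* Illustrative example (not from the GCC sources): a variable-length
   array causes its live range to be wrapped in a save/restore pair like
   the two helpers above, so each iteration reuses the stack space.
   N and USE_BUF are hypothetical.

     for (int i = 1; i <= n; i++)
       {
         char buf[i];     stack pointer saved on scope entry
         use_buf (buf);   and restored on scope exit
       }  */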
7839 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7841 static rtx
7842 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7844 const char *name;
7845 rtx fallback_retval;
7846 rtx_insn *(*gen_fn) (rtx, rtx);
7847 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7849 case BUILT_IN_GOACC_PARLEVEL_ID:
7850 name = "__builtin_goacc_parlevel_id";
7851 fallback_retval = const0_rtx;
7852 gen_fn = targetm.gen_oacc_dim_pos;
7853 break;
7854 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7855 name = "__builtin_goacc_parlevel_size";
7856 fallback_retval = const1_rtx;
7857 gen_fn = targetm.gen_oacc_dim_size;
7858 break;
7859 default:
7860 gcc_unreachable ();
7863 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7865 error ("%qs only supported in OpenACC code", name);
7866 return const0_rtx;
7869 tree arg = CALL_EXPR_ARG (exp, 0);
7870 if (TREE_CODE (arg) != INTEGER_CST)
7872 error ("non-constant argument 0 to %qs", name);
7873 return const0_rtx;
7876 int dim = TREE_INT_CST_LOW (arg);
7877 switch (dim)
7879 case GOMP_DIM_GANG:
7880 case GOMP_DIM_WORKER:
7881 case GOMP_DIM_VECTOR:
7882 break;
7883 default:
7884 error ("illegal argument 0 to %qs", name);
7885 return const0_rtx;
7888 if (ignore)
7889 return target;
7891 if (target == NULL_RTX)
7892 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7894 if (!targetm.have_oacc_dim_size ())
7896 emit_move_insn (target, fallback_retval);
7897 return target;
7900 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7901 emit_insn (gen_fn (reg, GEN_INT (dim)));
7902 if (reg != target)
7903 emit_move_insn (target, reg);
7905 return target;
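/* Illustrative example (not from the GCC sources): inside an OpenACC
   offloaded region (GOMP_DIM_GANG is 0 in gomp-constants.h)

     int gang   = __builtin_goacc_parlevel_id (0);
     int ngangs = __builtin_goacc_parlevel_size (0);

   return this gang's index and the number of gangs.  Outside OpenACC
   code the calls are diagnosed as above; without the oacc_dim_size hook
   the fallback values (0 and 1 respectively) are used.  */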
7908 /* Expand a string compare operation using a sequence of char comparisons
7909 to avoid the call overhead, with the result going to TARGET if
7910 that's convenient.
7912 VAR_STR is the variable string source;
7913 CONST_STR is the constant string source;
7914 LENGTH is the number of chars to compare;
7915 CONST_STR_N indicates which source string is the constant string;
7916 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7918 The call expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
7920 target = (int) (unsigned char) var_str[0]
7921 - (int) (unsigned char) const_str[0];
7922 if (target != 0)
7923 goto ne_label;
7924 ...
7925 target = (int) (unsigned char) var_str[length - 2]
7926 - (int) (unsigned char) const_str[length - 2];
7927 if (target != 0)
7928 goto ne_label;
7929 target = (int) (unsigned char) var_str[length - 1]
7930 - (int) (unsigned char) const_str[length - 1];
7931 ne_label:
7934 static rtx
7935 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7936 unsigned HOST_WIDE_INT length,
7937 int const_str_n, machine_mode mode)
7939 HOST_WIDE_INT offset = 0;
7940 rtx var_rtx_array
7941 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7942 rtx var_rtx = NULL_RTX;
7943 rtx const_rtx = NULL_RTX;
7944 rtx result = target ? target : gen_reg_rtx (mode);
7945 rtx_code_label *ne_label = gen_label_rtx ();
7946 tree unit_type_node = unsigned_char_type_node;
7947 scalar_int_mode unit_mode
7948 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7950 start_sequence ();
7952 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7954 var_rtx
7955 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7956 const_rtx = c_readstr (const_str + offset, unit_mode);
7957 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7958 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7960 op0 = convert_modes (mode, unit_mode, op0, 1);
7961 op1 = convert_modes (mode, unit_mode, op1, 1);
7962 result = expand_simple_binop (mode, MINUS, op0, op1,
7963 result, 1, OPTAB_WIDEN);
7964 if (i < length - 1)
7965 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7966 mode, true, ne_label);
7967 offset += GET_MODE_SIZE (unit_mode);
7970 emit_label (ne_label);
7971 rtx_insn *insns = get_insns ();
7972 end_sequence ();
7973 emit_insn (insns);
7975 return result;
7978 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7979 to TARGET if that's convenient.
7980 If the call cannot be inlined, return NULL_RTX. */
7982 static rtx
7983 inline_expand_builtin_bytecmp (tree exp, rtx target)
7985 tree fndecl = get_callee_fndecl (exp);
7986 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7987 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7989 /* Do NOT apply this inlining expansion when optimizing for size or
7990 optimization level below 2. */
7991 if (optimize < 2 || optimize_insn_for_size_p ())
7992 return NULL_RTX;
7994 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7995 || fcode == BUILT_IN_STRNCMP
7996 || fcode == BUILT_IN_MEMCMP);
7998 /* On a target where the type of the call (int) has the same or narrower precision
7999 than unsigned char, give up the inlining expansion. */
8000 if (TYPE_PRECISION (unsigned_char_type_node)
8001 >= TYPE_PRECISION (TREE_TYPE (exp)))
8002 return NULL_RTX;
8004 tree arg1 = CALL_EXPR_ARG (exp, 0);
8005 tree arg2 = CALL_EXPR_ARG (exp, 1);
8006 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
8008 unsigned HOST_WIDE_INT len1 = 0;
8009 unsigned HOST_WIDE_INT len2 = 0;
8010 unsigned HOST_WIDE_INT len3 = 0;
8012 /* Get the object representation of the initializers of ARG1 and ARG2
8013 as strings, provided they refer to constant objects, with their byte
8014 sizes in LEN1 and LEN2, respectively. */
8015 const char *bytes1 = getbyterep (arg1, &len1);
8016 const char *bytes2 = getbyterep (arg2, &len2);
8018 /* Fail if neither argument refers to an initialized constant. */
8019 if (!bytes1 && !bytes2)
8020 return NULL_RTX;
8022 if (is_ncmp)
8024 /* Fail if the memcmp/strncmp bound is not a constant. */
8025 if (!tree_fits_uhwi_p (len3_tree))
8026 return NULL_RTX;
8028 len3 = tree_to_uhwi (len3_tree);
8030 if (fcode == BUILT_IN_MEMCMP)
8032 /* Fail if the memcmp bound is greater than the size of either
8033 of the two constant objects. */
8034 if ((bytes1 && len1 < len3)
8035 || (bytes2 && len2 < len3))
8036 return NULL_RTX;
8040 if (fcode != BUILT_IN_MEMCMP)
8042 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
8043 and LEN2 to the length of the nul-terminated string stored
8044 in each. */
8045 if (bytes1 != NULL)
8046 len1 = strnlen (bytes1, len1) + 1;
8047 if (bytes2 != NULL)
8048 len2 = strnlen (bytes2, len2) + 1;
8051 /* See inline_string_cmp. */
8052 int const_str_n;
8053 if (!len1)
8054 const_str_n = 2;
8055 else if (!len2)
8056 const_str_n = 1;
8057 else if (len2 > len1)
8058 const_str_n = 1;
8059 else
8060 const_str_n = 2;
8062 /* For strncmp only, compute the new bound as the smallest of
8063 the lengths of the two strings (plus 1) and the bound provided
8064 to the function. */
8065 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
8066 if (is_ncmp && len3 < bound)
8067 bound = len3;
8069 /* If the bound of the comparison is larger than the threshold,
8070 do nothing. */
8071 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
8072 return NULL_RTX;
8074 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8076 /* Now, start inline expansion of the call. */
8077 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
8078 (const_str_n == 1) ? bytes1 : bytes2, bound,
8079 const_str_n, mode);
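/* Illustrative example (not from the GCC sources): given

     if (strcmp (s, "hi") == 0) ...

   the constant string bounds the comparison at three bytes (including
   the terminating nul), so if that is within the
   --param builtin-string-cmp-inline-length threshold the call becomes a
   short run of byte subtractions with early exits, per inline_string_cmp
   above.  */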
8082 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
8083 represents the size of the first argument to that call, or VOIDmode
8084 if the argument is a pointer. IGNORE will be true if the result
8085 isn't used. */
8086 static rtx
8087 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
8088 bool ignore)
8090 rtx val, failsafe;
8091 unsigned nargs = call_expr_nargs (exp);
8093 tree arg0 = CALL_EXPR_ARG (exp, 0);
8095 if (mode == VOIDmode)
8097 mode = TYPE_MODE (TREE_TYPE (arg0));
8098 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
8101 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
8103 /* An optional second argument can be used as a failsafe value on
8104 some machines. If it isn't present, then the failsafe value is
8105 assumed to be 0. */
8106 if (nargs > 1)
8108 tree arg1 = CALL_EXPR_ARG (exp, 1);
8109 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
8111 else
8112 failsafe = const0_rtx;
8114 /* If the result isn't used, the behavior is undefined. It would be
8115 nice to emit a warning here, but path splitting means this might
8116 happen with legitimate code. So simply drop the builtin
8117 expansion in that case; we've handled any side-effects above. */
8118 if (ignore)
8119 return const0_rtx;
8121 /* If we don't have a suitable target, create one to hold the result. */
8122 if (target == NULL || GET_MODE (target) != mode)
8123 target = gen_reg_rtx (mode);
8125 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
8126 val = convert_modes (mode, VOIDmode, val, false);
8128 return targetm.speculation_safe_value (mode, target, val, failsafe);
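/* Illustrative example (not from the GCC sources): the Spectre-v1 style
   mitigation idiom this builtin supports.  ARRAY, LEN, I and VAL are
   hypothetical.

     if (i < len)
       {
         i = __builtin_speculation_safe_value (i);
         val = array[i];
       }

   Under mis-speculation the target hook forces I to the failsafe value
   (0, or the optional second argument).  */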
8131 /* Expand an expression EXP that calls a built-in function,
8132 with result going to TARGET if that's convenient
8133 (and in mode MODE if that's convenient).
8134 SUBTARGET may be used as the target for computing one of EXP's operands.
8135 IGNORE is nonzero if the value is to be ignored. */
8137 rtx
8138 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
8139 int ignore)
8141 tree fndecl = get_callee_fndecl (exp);
8142 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
8143 int flags;
8145 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8146 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
8148 /* When ASan is enabled, we don't want to expand some memory/string
8149 builtins and rely on libsanitizer's hooks. This allows us to avoid
8150 redundant checks and be sure that possible overflow will be detected
8151 by ASan. */
8153 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8154 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
8155 return expand_call (exp, target, ignore);
8157 /* When not optimizing, generate calls to library functions for a certain
8158 set of builtins. */
8159 if (!optimize
8160 && !called_as_built_in (fndecl)
8161 && fcode != BUILT_IN_FORK
8162 && fcode != BUILT_IN_EXECL
8163 && fcode != BUILT_IN_EXECV
8164 && fcode != BUILT_IN_EXECLP
8165 && fcode != BUILT_IN_EXECLE
8166 && fcode != BUILT_IN_EXECVP
8167 && fcode != BUILT_IN_EXECVE
8168 && !ALLOCA_FUNCTION_CODE_P (fcode)
8169 && fcode != BUILT_IN_FREE)
8170 return expand_call (exp, target, ignore);
8172 /* The built-in function expanders test for target == const0_rtx
8173 to determine whether the function's result will be ignored. */
8174 if (ignore)
8175 target = const0_rtx;
8177 /* If the result of a pure or const built-in function is ignored, and
8178 none of its arguments are volatile, we can avoid expanding the
8179 built-in call and just evaluate the arguments for side-effects. */
8180 if (target == const0_rtx
8181 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
8182 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8184 bool volatilep = false;
8185 tree arg;
8186 call_expr_arg_iterator iter;
8188 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8189 if (TREE_THIS_VOLATILE (arg))
8191 volatilep = true;
8192 break;
8195 if (! volatilep)
8197 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8198 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8199 return const0_rtx;
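/* Illustrative example (not from the GCC sources): for a const builtin
   whose result is ignored, e.g.

     (void) __builtin_labs (*p++);

   only the argument (here the increment of the hypothetical P) is
   evaluated; no call or computation is emitted.  */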
8203 switch (fcode)
8205 CASE_FLT_FN (BUILT_IN_FABS):
8206 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8207 case BUILT_IN_FABSD32:
8208 case BUILT_IN_FABSD64:
8209 case BUILT_IN_FABSD128:
8210 target = expand_builtin_fabs (exp, target, subtarget);
8211 if (target)
8212 return target;
8213 break;
8215 CASE_FLT_FN (BUILT_IN_COPYSIGN):
8216 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
8217 target = expand_builtin_copysign (exp, target, subtarget);
8218 if (target)
8219 return target;
8220 break;
8222 /* Just do a normal library call if we were unable to fold
8223 the values. */
8224 CASE_FLT_FN (BUILT_IN_CABS):
8225 break;
8227 CASE_FLT_FN (BUILT_IN_FMA):
8228 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
8229 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
8230 if (target)
8231 return target;
8232 break;
8234 CASE_FLT_FN (BUILT_IN_ILOGB):
8235 if (! flag_unsafe_math_optimizations)
8236 break;
8237 gcc_fallthrough ();
8238 CASE_FLT_FN (BUILT_IN_ISINF):
8239 CASE_FLT_FN (BUILT_IN_FINITE):
8240 case BUILT_IN_ISFINITE:
8241 case BUILT_IN_ISNORMAL:
8242 target = expand_builtin_interclass_mathfn (exp, target);
8243 if (target)
8244 return target;
8245 break;
8247 CASE_FLT_FN (BUILT_IN_ICEIL):
8248 CASE_FLT_FN (BUILT_IN_LCEIL):
8249 CASE_FLT_FN (BUILT_IN_LLCEIL):
8250 CASE_FLT_FN (BUILT_IN_LFLOOR):
8251 CASE_FLT_FN (BUILT_IN_IFLOOR):
8252 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8253 target = expand_builtin_int_roundingfn (exp, target);
8254 if (target)
8255 return target;
8256 break;
8258 CASE_FLT_FN (BUILT_IN_IRINT):
8259 CASE_FLT_FN (BUILT_IN_LRINT):
8260 CASE_FLT_FN (BUILT_IN_LLRINT):
8261 CASE_FLT_FN (BUILT_IN_IROUND):
8262 CASE_FLT_FN (BUILT_IN_LROUND):
8263 CASE_FLT_FN (BUILT_IN_LLROUND):
8264 target = expand_builtin_int_roundingfn_2 (exp, target);
8265 if (target)
8266 return target;
8267 break;
8269 CASE_FLT_FN (BUILT_IN_POWI):
8270 target = expand_builtin_powi (exp, target);
8271 if (target)
8272 return target;
8273 break;
8275 CASE_FLT_FN (BUILT_IN_CEXPI):
8276 target = expand_builtin_cexpi (exp, target);
8277 gcc_assert (target);
8278 return target;
8280 CASE_FLT_FN (BUILT_IN_SIN):
8281 CASE_FLT_FN (BUILT_IN_COS):
8282 if (! flag_unsafe_math_optimizations)
8283 break;
8284 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8285 if (target)
8286 return target;
8287 break;
8289 CASE_FLT_FN (BUILT_IN_SINCOS):
8290 if (! flag_unsafe_math_optimizations)
8291 break;
8292 target = expand_builtin_sincos (exp);
8293 if (target)
8294 return target;
8295 break;
8297 case BUILT_IN_APPLY_ARGS:
8298 return expand_builtin_apply_args ();
8300 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8301 FUNCTION with a copy of the parameters described by
8302 ARGUMENTS, and ARGSIZE. It returns a block of memory
8303 allocated on the stack into which is stored all the registers
8304 that might possibly be used for returning the result of a
8305 function. ARGUMENTS is the value returned by
8306 __builtin_apply_args. ARGSIZE is the number of bytes of
8307 arguments that must be copied. ??? How should this value be
8308 computed? We'll also need a safe worst case value for varargs
8309 functions. */
8310 case BUILT_IN_APPLY:
8311 if (!validate_arglist (exp, POINTER_TYPE,
8312 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8313 && !validate_arglist (exp, REFERENCE_TYPE,
8314 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8315 return const0_rtx;
8316 else
8318 rtx ops[3];
8320 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8321 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8322 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8324 return expand_builtin_apply (ops[0], ops[1], ops[2]);
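/* Illustrative example (not from the GCC sources): forwarding all
   incoming arguments to another function with the same signature.
   TARGET_FN is hypothetical, and 64 is a guessed ARGSIZE, since the ???
   note above explains there is no safe general answer.

     void
     wrapper (int a, ...)
     {
       __builtin_return (__builtin_apply ((void (*) ()) target_fn,
                                          __builtin_apply_args (), 64));
     }  */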
8327 /* __builtin_return (RESULT) causes the function to return the
8328 value described by RESULT. RESULT is address of the block of
8329 memory returned by __builtin_apply. */
8330 case BUILT_IN_RETURN:
8331 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8332 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8333 return const0_rtx;
8335 case BUILT_IN_SAVEREGS:
8336 return expand_builtin_saveregs ();
8338 case BUILT_IN_VA_ARG_PACK:
8339 /* All valid uses of __builtin_va_arg_pack () are removed during
8340 inlining. */
8341 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8342 return const0_rtx;
8344 case BUILT_IN_VA_ARG_PACK_LEN:
8345 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8346 inlining. */
8347 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8348 return const0_rtx;
8350 /* Return the address of the first anonymous stack arg. */
8351 case BUILT_IN_NEXT_ARG:
8352 if (fold_builtin_next_arg (exp, false))
8353 return const0_rtx;
8354 return expand_builtin_next_arg ();
8356 case BUILT_IN_CLEAR_CACHE:
8357 target = expand_builtin___clear_cache (exp);
8358 if (target)
8359 return target;
8360 break;
8362 case BUILT_IN_CLASSIFY_TYPE:
8363 return expand_builtin_classify_type (exp);
8365 case BUILT_IN_CONSTANT_P:
8366 return const0_rtx;
8368 case BUILT_IN_FRAME_ADDRESS:
8369 case BUILT_IN_RETURN_ADDRESS:
8370 return expand_builtin_frame_address (fndecl, exp);
8372 /* Returns the address of the area where the structure is returned.
8373 0 otherwise. */
8374 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8375 if (call_expr_nargs (exp) != 0
8376 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8377 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8378 return const0_rtx;
8379 else
8380 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8382 CASE_BUILT_IN_ALLOCA:
8383 target = expand_builtin_alloca (exp);
8384 if (target)
8385 return target;
8386 break;
8388 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8389 return expand_asan_emit_allocas_unpoison (exp);
8391 case BUILT_IN_STACK_SAVE:
8392 return expand_stack_save ();
8394 case BUILT_IN_STACK_RESTORE:
8395 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8396 return const0_rtx;
8398 case BUILT_IN_BSWAP16:
8399 case BUILT_IN_BSWAP32:
8400 case BUILT_IN_BSWAP64:
8401 case BUILT_IN_BSWAP128:
8402 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8403 if (target)
8404 return target;
8405 break;
8407 CASE_INT_FN (BUILT_IN_FFS):
8408 target = expand_builtin_unop (target_mode, exp, target,
8409 subtarget, ffs_optab);
8410 if (target)
8411 return target;
8412 break;
8414 CASE_INT_FN (BUILT_IN_CLZ):
8415 target = expand_builtin_unop (target_mode, exp, target,
8416 subtarget, clz_optab);
8417 if (target)
8418 return target;
8419 break;
8421 CASE_INT_FN (BUILT_IN_CTZ):
8422 target = expand_builtin_unop (target_mode, exp, target,
8423 subtarget, ctz_optab);
8424 if (target)
8425 return target;
8426 break;
8428 CASE_INT_FN (BUILT_IN_CLRSB):
8429 target = expand_builtin_unop (target_mode, exp, target,
8430 subtarget, clrsb_optab);
8431 if (target)
8432 return target;
8433 break;
8435 CASE_INT_FN (BUILT_IN_POPCOUNT):
8436 target = expand_builtin_unop (target_mode, exp, target,
8437 subtarget, popcount_optab);
8438 if (target)
8439 return target;
8440 break;
8442 CASE_INT_FN (BUILT_IN_PARITY):
8443 target = expand_builtin_unop (target_mode, exp, target,
8444 subtarget, parity_optab);
8445 if (target)
8446 return target;
8447 break;
8449 case BUILT_IN_STRLEN:
8450 target = expand_builtin_strlen (exp, target, target_mode);
8451 if (target)
8452 return target;
8453 break;
8455 case BUILT_IN_STRNLEN:
8456 target = expand_builtin_strnlen (exp, target, target_mode);
8457 if (target)
8458 return target;
8459 break;
8461 case BUILT_IN_STRCAT:
8462 target = expand_builtin_strcat (exp);
8463 if (target)
8464 return target;
8465 break;
8467 case BUILT_IN_GETTEXT:
8468 case BUILT_IN_PUTS:
8469 case BUILT_IN_PUTS_UNLOCKED:
8470 case BUILT_IN_STRDUP:
8471 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8472 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8473 break;
8475 case BUILT_IN_INDEX:
8476 case BUILT_IN_RINDEX:
8477 case BUILT_IN_STRCHR:
8478 case BUILT_IN_STRRCHR:
8479 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8480 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8481 break;
8483 case BUILT_IN_FPUTS:
8484 case BUILT_IN_FPUTS_UNLOCKED:
8485 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8486 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8487 break;
8489 case BUILT_IN_STRNDUP:
8490 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8491 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
8492 break;
8494 case BUILT_IN_STRCASECMP:
8495 case BUILT_IN_STRPBRK:
8496 case BUILT_IN_STRSPN:
8497 case BUILT_IN_STRCSPN:
8498 case BUILT_IN_STRSTR:
8499 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8501 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
8502 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
8504 break;
8506 case BUILT_IN_STRCPY:
8507 target = expand_builtin_strcpy (exp, target);
8508 if (target)
8509 return target;
8510 break;
8512 case BUILT_IN_STRNCAT:
8513 target = expand_builtin_strncat (exp, target);
8514 if (target)
8515 return target;
8516 break;
8518 case BUILT_IN_STRNCPY:
8519 target = expand_builtin_strncpy (exp, target);
8520 if (target)
8521 return target;
8522 break;
8524 case BUILT_IN_STPCPY:
8525 target = expand_builtin_stpcpy (exp, target, mode);
8526 if (target)
8527 return target;
8528 break;
8530 case BUILT_IN_STPNCPY:
8531 target = expand_builtin_stpncpy (exp, target);
8532 if (target)
8533 return target;
8534 break;
8536 case BUILT_IN_MEMCHR:
8537 target = expand_builtin_memchr (exp, target);
8538 if (target)
8539 return target;
8540 break;
8542 case BUILT_IN_MEMCPY:
8543 target = expand_builtin_memcpy (exp, target);
8544 if (target)
8545 return target;
8546 break;
8548 case BUILT_IN_MEMMOVE:
8549 target = expand_builtin_memmove (exp, target);
8550 if (target)
8551 return target;
8552 break;
8554 case BUILT_IN_MEMPCPY:
8555 target = expand_builtin_mempcpy (exp, target);
8556 if (target)
8557 return target;
8558 break;
8560 case BUILT_IN_MEMSET:
8561 target = expand_builtin_memset (exp, target, mode);
8562 if (target)
8563 return target;
8564 break;
8566 case BUILT_IN_BZERO:
8567 target = expand_builtin_bzero (exp);
8568 if (target)
8569 return target;
8570 break;
8572 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8573 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8574 when changing it to a strcmp call. */
8575 case BUILT_IN_STRCMP_EQ:
8576 target = expand_builtin_memcmp (exp, target, true);
8577 if (target)
8578 return target;
8580 /* Change this call back to a BUILT_IN_STRCMP. */
8581 TREE_OPERAND (exp, 1)
8582 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8584 /* Delete the last parameter. */
8585 unsigned int i;
8586 vec<tree, va_gc> *arg_vec;
8587 vec_alloc (arg_vec, 2);
8588 for (i = 0; i < 2; i++)
8589 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8590 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8591 /* FALLTHROUGH */
8593 case BUILT_IN_STRCMP:
8594 target = expand_builtin_strcmp (exp, target);
8595 if (target)
8596 return target;
8597 break;
8599 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8600 back to a BUILT_IN_STRNCMP. */
8601 case BUILT_IN_STRNCMP_EQ:
8602 target = expand_builtin_memcmp (exp, target, true);
8603 if (target)
8604 return target;
8606 /* Change it back to a BUILT_IN_STRNCMP. */
8607 TREE_OPERAND (exp, 1)
8608 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8609 /* FALLTHROUGH */
8611 case BUILT_IN_STRNCMP:
8612 target = expand_builtin_strncmp (exp, target, mode);
8613 if (target)
8614 return target;
8615 break;
8617 case BUILT_IN_BCMP:
8618 case BUILT_IN_MEMCMP:
8619 case BUILT_IN_MEMCMP_EQ:
8620 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8621 if (target)
8622 return target;
8623 if (fcode == BUILT_IN_MEMCMP_EQ)
8625 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8626 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8628 break;
8630 case BUILT_IN_SETJMP:
8631 /* This should have been lowered to the builtins below. */
8632 gcc_unreachable ();
8634 case BUILT_IN_SETJMP_SETUP:
8635 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8636 and the receiver label. */
8637 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8639 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8640 VOIDmode, EXPAND_NORMAL);
8641 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8642 rtx_insn *label_r = label_rtx (label);
8644 /* This is copied from the handling of non-local gotos. */
8645 expand_builtin_setjmp_setup (buf_addr, label_r);
8646 nonlocal_goto_handler_labels
8647 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8648 nonlocal_goto_handler_labels);
8649 /* ??? Do not let expand_label treat us as such since we would
8650 not want to be both on the list of non-local labels and on
8651 the list of forced labels. */
8652 FORCED_LABEL (label) = 0;
8653 return const0_rtx;
8655 break;
8657 case BUILT_IN_SETJMP_RECEIVER:
8658 /* __builtin_setjmp_receiver is passed the receiver label. */
8659 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8661 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8662 rtx_insn *label_r = label_rtx (label);
8664 expand_builtin_setjmp_receiver (label_r);
8665 return const0_rtx;
8667 break;
8669 /* __builtin_longjmp is passed a pointer to an array of five words.
8670 It's similar to the C library longjmp function but works with
8671 __builtin_setjmp above. */
8672 case BUILT_IN_LONGJMP:
8673 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8675 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8676 VOIDmode, EXPAND_NORMAL);
8677 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8679 if (value != const1_rtx)
8681 error ("%<__builtin_longjmp%> second argument must be 1");
8682 return const0_rtx;
8685 expand_builtin_longjmp (buf_addr, value);
8686 return const0_rtx;
8688 break;
8690 case BUILT_IN_NONLOCAL_GOTO:
8691 target = expand_builtin_nonlocal_goto (exp);
8692 if (target)
8693 return target;
8694 break;
8696 /* This updates the setjmp buffer that is its argument with the value
8697 of the current stack pointer. */
8698 case BUILT_IN_UPDATE_SETJMP_BUF:
8699 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8701 rtx buf_addr
8702 = expand_normal (CALL_EXPR_ARG (exp, 0));
8704 expand_builtin_update_setjmp_buf (buf_addr);
8705 return const0_rtx;
8707 break;
8709 case BUILT_IN_TRAP:
8710 expand_builtin_trap ();
8711 return const0_rtx;
8713 case BUILT_IN_UNREACHABLE:
8714 expand_builtin_unreachable ();
8715 return const0_rtx;
8717 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8718 case BUILT_IN_SIGNBITD32:
8719 case BUILT_IN_SIGNBITD64:
8720 case BUILT_IN_SIGNBITD128:
8721 target = expand_builtin_signbit (exp, target);
8722 if (target)
8723 return target;
8724 break;
8726 /* Various hooks for the DWARF 2 __throw routine. */
8727 case BUILT_IN_UNWIND_INIT:
8728 expand_builtin_unwind_init ();
8729 return const0_rtx;
8730 case BUILT_IN_DWARF_CFA:
8731 return virtual_cfa_rtx;
8732 #ifdef DWARF2_UNWIND_INFO
8733 case BUILT_IN_DWARF_SP_COLUMN:
8734 return expand_builtin_dwarf_sp_column ();
8735 case BUILT_IN_INIT_DWARF_REG_SIZES:
8736 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8737 return const0_rtx;
8738 #endif
8739 case BUILT_IN_FROB_RETURN_ADDR:
8740 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8741 case BUILT_IN_EXTRACT_RETURN_ADDR:
8742 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8743 case BUILT_IN_EH_RETURN:
8744 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8745 CALL_EXPR_ARG (exp, 1));
8746 return const0_rtx;
8747 case BUILT_IN_EH_RETURN_DATA_REGNO:
8748 return expand_builtin_eh_return_data_regno (exp);
8749 case BUILT_IN_EXTEND_POINTER:
8750 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8751 case BUILT_IN_EH_POINTER:
8752 return expand_builtin_eh_pointer (exp);
8753 case BUILT_IN_EH_FILTER:
8754 return expand_builtin_eh_filter (exp);
8755 case BUILT_IN_EH_COPY_VALUES:
8756 return expand_builtin_eh_copy_values (exp);
8758 case BUILT_IN_VA_START:
8759 return expand_builtin_va_start (exp);
8760 case BUILT_IN_VA_END:
8761 return expand_builtin_va_end (exp);
8762 case BUILT_IN_VA_COPY:
8763 return expand_builtin_va_copy (exp);
8764 case BUILT_IN_EXPECT:
8765 return expand_builtin_expect (exp, target);
8766 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8767 return expand_builtin_expect_with_probability (exp, target);
8768 case BUILT_IN_ASSUME_ALIGNED:
8769 return expand_builtin_assume_aligned (exp, target);
8770 case BUILT_IN_PREFETCH:
8771 expand_builtin_prefetch (exp);
8772 return const0_rtx;
8774 case BUILT_IN_INIT_TRAMPOLINE:
8775 return expand_builtin_init_trampoline (exp, true);
8776 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8777 return expand_builtin_init_trampoline (exp, false);
8778 case BUILT_IN_ADJUST_TRAMPOLINE:
8779 return expand_builtin_adjust_trampoline (exp);
8781 case BUILT_IN_INIT_DESCRIPTOR:
8782 return expand_builtin_init_descriptor (exp);
8783 case BUILT_IN_ADJUST_DESCRIPTOR:
8784 return expand_builtin_adjust_descriptor (exp);
8786 case BUILT_IN_FORK:
8787 case BUILT_IN_EXECL:
8788 case BUILT_IN_EXECV:
8789 case BUILT_IN_EXECLP:
8790 case BUILT_IN_EXECLE:
8791 case BUILT_IN_EXECVP:
8792 case BUILT_IN_EXECVE:
8793 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8794 if (target)
8795 return target;
8796 break;
8798 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8799 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8800 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8801 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8802 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8803 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8804 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8805 if (target)
8806 return target;
8807 break;
8809 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8810 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8811 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8812 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8813 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8814 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8815 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8816 if (target)
8817 return target;
8818 break;
8820 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8821 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8822 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8823 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8824 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8825 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8826 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8827 if (target)
8828 return target;
8829 break;
8831 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8832 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8833 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8834 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8835 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8836 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8837 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8838 if (target)
8839 return target;
8840 break;
8842 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8843 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8844 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8845 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8846 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8847 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8848 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8849 if (target)
8850 return target;
8851 break;
8853 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8854 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8855 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8856 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8857 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8858 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8859 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8860 if (target)
8861 return target;
8862 break;
8864 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8865 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8866 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8867 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8868 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8869 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8870 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8871 if (target)
8872 return target;
8873 break;
8875 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8876 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8877 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8878 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8879 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8880 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8881 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8882 if (target)
8883 return target;
8884 break;
8886 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8887 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8888 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8889 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8890 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8891 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8892 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8893 if (target)
8894 return target;
8895 break;
8897 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8898 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8899 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8900 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8901 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8902 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8903 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8904 if (target)
8905 return target;
8906 break;
8908 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8909 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8910 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8911 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8912 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8913 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8914 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8915 if (target)
8916 return target;
8917 break;
8919 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8920 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8921 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8922 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8923 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8924 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8925 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8926 if (target)
8927 return target;
8928 break;
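/* Illustrative sketch (hypothetical user code, not part of this file):
   the boolean `after' argument passed to expand_builtin_sync_operation
   above selects between the fetch-and-op forms, which return the value
   memory held before the operation, and the op-and-fetch forms, which
   return the value after it.  */
static int
sync_fetch_example (int *counter)
{
  int before = __sync_fetch_and_add (counter, 1); /* old value */
  int after = __sync_add_and_fetch (counter, 1);  /* new value */
  return after - before; /* 2 if no other thread intervened */
}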
8930 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8931 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8932 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8933 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8934 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8935 if (mode == VOIDmode)
8936 mode = TYPE_MODE (boolean_type_node);
8937 if (!target || !register_operand (target, mode))
8938 target = gen_reg_rtx (mode);
8940 mode = get_builtin_sync_mode
8941 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8942 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8943 if (target)
8944 return target;
8945 break;
8947 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8948 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8949 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8950 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8951 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8952 mode = get_builtin_sync_mode
8953 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8954 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8955 if (target)
8956 return target;
8957 break;
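/* Illustrative sketch (hypothetical user code): the boolean flag passed
   to expand_builtin_compare_and_swap above distinguishes the two
   user-visible forms; the _bool_ variant reports whether the swap
   happened, while the _val_ variant returns the prior memory contents.  */
static void
cas_example (int *p)
{
  _Bool ok = __sync_bool_compare_and_swap (p, 0, 1); /* true iff *p was 0 */
  int old = __sync_val_compare_and_swap (p, 1, 2);   /* value seen in *p */
  (void) ok; (void) old;
}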
8959 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8960 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8961 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8962 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8963 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8964 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8965 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8966 if (target)
8967 return target;
8968 break;
8970 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8971 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8972 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8973 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8974 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8975 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8976 expand_builtin_sync_lock_release (mode, exp);
8977 return const0_rtx;
8979 case BUILT_IN_SYNC_SYNCHRONIZE:
8980 expand_builtin_sync_synchronize ();
8981 return const0_rtx;
8983 case BUILT_IN_ATOMIC_EXCHANGE_1:
8984 case BUILT_IN_ATOMIC_EXCHANGE_2:
8985 case BUILT_IN_ATOMIC_EXCHANGE_4:
8986 case BUILT_IN_ATOMIC_EXCHANGE_8:
8987 case BUILT_IN_ATOMIC_EXCHANGE_16:
8988 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8989 target = expand_builtin_atomic_exchange (mode, exp, target);
8990 if (target)
8991 return target;
8992 break;
8994 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8995 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8996 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8997 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8998 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
9000 unsigned int nargs, z;
9001 vec<tree, va_gc> *vec;
9003 mode =
9004 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
9005 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
9006 if (target)
9007 return target;
9009 /* If this is turned into an external library call, the weak parameter
9010 must be dropped to match the expected parameter list. */
9011 nargs = call_expr_nargs (exp);
9012 vec_alloc (vec, nargs - 1);
9013 for (z = 0; z < 3; z++)
9014 vec->quick_push (CALL_EXPR_ARG (exp, z));
9015 /* Skip the boolean weak parameter. */
9016 for (z = 4; z < 6; z++)
9017 vec->quick_push (CALL_EXPR_ARG (exp, z));
9018 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
9019 break;
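/* Illustrative sketch (hypothetical user code): the built-in form carries
   the `weak' flag that the rewrite above discards, since the external
   library fallback takes no such parameter.  */
static _Bool
cmpxchg_example (int *p, int *expected, int desired)
{
  return __atomic_compare_exchange_n (p, expected, desired, /* weak */ 0,
				      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}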
9022 case BUILT_IN_ATOMIC_LOAD_1:
9023 case BUILT_IN_ATOMIC_LOAD_2:
9024 case BUILT_IN_ATOMIC_LOAD_4:
9025 case BUILT_IN_ATOMIC_LOAD_8:
9026 case BUILT_IN_ATOMIC_LOAD_16:
9027 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
9028 target = expand_builtin_atomic_load (mode, exp, target);
9029 if (target)
9030 return target;
9031 break;
9033 case BUILT_IN_ATOMIC_STORE_1:
9034 case BUILT_IN_ATOMIC_STORE_2:
9035 case BUILT_IN_ATOMIC_STORE_4:
9036 case BUILT_IN_ATOMIC_STORE_8:
9037 case BUILT_IN_ATOMIC_STORE_16:
9038 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
9039 target = expand_builtin_atomic_store (mode, exp);
9040 if (target)
9041 return const0_rtx;
9042 break;
9044 case BUILT_IN_ATOMIC_ADD_FETCH_1:
9045 case BUILT_IN_ATOMIC_ADD_FETCH_2:
9046 case BUILT_IN_ATOMIC_ADD_FETCH_4:
9047 case BUILT_IN_ATOMIC_ADD_FETCH_8:
9048 case BUILT_IN_ATOMIC_ADD_FETCH_16:
9050 enum built_in_function lib;
9051 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
9052 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
9053 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
9054 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
9055 ignore, lib);
9056 if (target)
9057 return target;
9058 break;
9060 case BUILT_IN_ATOMIC_SUB_FETCH_1:
9061 case BUILT_IN_ATOMIC_SUB_FETCH_2:
9062 case BUILT_IN_ATOMIC_SUB_FETCH_4:
9063 case BUILT_IN_ATOMIC_SUB_FETCH_8:
9064 case BUILT_IN_ATOMIC_SUB_FETCH_16:
9066 enum built_in_function lib;
9067 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
9068 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
9069 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
9070 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
9071 ignore, lib);
9072 if (target)
9073 return target;
9074 break;
9076 case BUILT_IN_ATOMIC_AND_FETCH_1:
9077 case BUILT_IN_ATOMIC_AND_FETCH_2:
9078 case BUILT_IN_ATOMIC_AND_FETCH_4:
9079 case BUILT_IN_ATOMIC_AND_FETCH_8:
9080 case BUILT_IN_ATOMIC_AND_FETCH_16:
9082 enum built_in_function lib;
9083 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
9084 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
9085 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
9086 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
9087 ignore, lib);
9088 if (target)
9089 return target;
9090 break;
9092 case BUILT_IN_ATOMIC_NAND_FETCH_1:
9093 case BUILT_IN_ATOMIC_NAND_FETCH_2:
9094 case BUILT_IN_ATOMIC_NAND_FETCH_4:
9095 case BUILT_IN_ATOMIC_NAND_FETCH_8:
9096 case BUILT_IN_ATOMIC_NAND_FETCH_16:
9098 enum built_in_function lib;
9099 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
9100 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
9101 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
9102 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
9103 ignore, lib);
9104 if (target)
9105 return target;
9106 break;
9108 case BUILT_IN_ATOMIC_XOR_FETCH_1:
9109 case BUILT_IN_ATOMIC_XOR_FETCH_2:
9110 case BUILT_IN_ATOMIC_XOR_FETCH_4:
9111 case BUILT_IN_ATOMIC_XOR_FETCH_8:
9112 case BUILT_IN_ATOMIC_XOR_FETCH_16:
9114 enum built_in_function lib;
9115 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
9116 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
9117 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
9118 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
9119 ignore, lib);
9120 if (target)
9121 return target;
9122 break;
9124 case BUILT_IN_ATOMIC_OR_FETCH_1:
9125 case BUILT_IN_ATOMIC_OR_FETCH_2:
9126 case BUILT_IN_ATOMIC_OR_FETCH_4:
9127 case BUILT_IN_ATOMIC_OR_FETCH_8:
9128 case BUILT_IN_ATOMIC_OR_FETCH_16:
9130 enum built_in_function lib;
9131 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
9132 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
9133 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
9134 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
9135 ignore, lib);
9136 if (target)
9137 return target;
9138 break;
9140 case BUILT_IN_ATOMIC_FETCH_ADD_1:
9141 case BUILT_IN_ATOMIC_FETCH_ADD_2:
9142 case BUILT_IN_ATOMIC_FETCH_ADD_4:
9143 case BUILT_IN_ATOMIC_FETCH_ADD_8:
9144 case BUILT_IN_ATOMIC_FETCH_ADD_16:
9145 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
9146 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
9147 ignore, BUILT_IN_NONE);
9148 if (target)
9149 return target;
9150 break;
9152 case BUILT_IN_ATOMIC_FETCH_SUB_1:
9153 case BUILT_IN_ATOMIC_FETCH_SUB_2:
9154 case BUILT_IN_ATOMIC_FETCH_SUB_4:
9155 case BUILT_IN_ATOMIC_FETCH_SUB_8:
9156 case BUILT_IN_ATOMIC_FETCH_SUB_16:
9157 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
9158 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
9159 ignore, BUILT_IN_NONE);
9160 if (target)
9161 return target;
9162 break;
9164 case BUILT_IN_ATOMIC_FETCH_AND_1:
9165 case BUILT_IN_ATOMIC_FETCH_AND_2:
9166 case BUILT_IN_ATOMIC_FETCH_AND_4:
9167 case BUILT_IN_ATOMIC_FETCH_AND_8:
9168 case BUILT_IN_ATOMIC_FETCH_AND_16:
9169 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
9170 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
9171 ignore, BUILT_IN_NONE);
9172 if (target)
9173 return target;
9174 break;
9176 case BUILT_IN_ATOMIC_FETCH_NAND_1:
9177 case BUILT_IN_ATOMIC_FETCH_NAND_2:
9178 case BUILT_IN_ATOMIC_FETCH_NAND_4:
9179 case BUILT_IN_ATOMIC_FETCH_NAND_8:
9180 case BUILT_IN_ATOMIC_FETCH_NAND_16:
9181 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
9182 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
9183 ignore, BUILT_IN_NONE);
9184 if (target)
9185 return target;
9186 break;
9188 case BUILT_IN_ATOMIC_FETCH_XOR_1:
9189 case BUILT_IN_ATOMIC_FETCH_XOR_2:
9190 case BUILT_IN_ATOMIC_FETCH_XOR_4:
9191 case BUILT_IN_ATOMIC_FETCH_XOR_8:
9192 case BUILT_IN_ATOMIC_FETCH_XOR_16:
9193 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
9194 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
9195 ignore, BUILT_IN_NONE);
9196 if (target)
9197 return target;
9198 break;
9200 case BUILT_IN_ATOMIC_FETCH_OR_1:
9201 case BUILT_IN_ATOMIC_FETCH_OR_2:
9202 case BUILT_IN_ATOMIC_FETCH_OR_4:
9203 case BUILT_IN_ATOMIC_FETCH_OR_8:
9204 case BUILT_IN_ATOMIC_FETCH_OR_16:
9205 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
9206 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
9207 ignore, BUILT_IN_NONE);
9208 if (target)
9209 return target;
9210 break;
9212 case BUILT_IN_ATOMIC_TEST_AND_SET:
9213 return expand_builtin_atomic_test_and_set (exp, target);
9215 case BUILT_IN_ATOMIC_CLEAR:
9216 return expand_builtin_atomic_clear (exp);
9218 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9219 return expand_builtin_atomic_always_lock_free (exp);
9221 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9222 target = expand_builtin_atomic_is_lock_free (exp);
9223 if (target)
9224 return target;
9225 break;
9227 case BUILT_IN_ATOMIC_THREAD_FENCE:
9228 expand_builtin_atomic_thread_fence (exp);
9229 return const0_rtx;
9231 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
9232 expand_builtin_atomic_signal_fence (exp);
9233 return const0_rtx;
9235 case BUILT_IN_OBJECT_SIZE:
9236 return expand_builtin_object_size (exp);
9238 case BUILT_IN_MEMCPY_CHK:
9239 case BUILT_IN_MEMPCPY_CHK:
9240 case BUILT_IN_MEMMOVE_CHK:
9241 case BUILT_IN_MEMSET_CHK:
9242 target = expand_builtin_memory_chk (exp, target, mode, fcode);
9243 if (target)
9244 return target;
9245 break;
9247 case BUILT_IN_STRCPY_CHK:
9248 case BUILT_IN_STPCPY_CHK:
9249 case BUILT_IN_STRNCPY_CHK:
9250 case BUILT_IN_STPNCPY_CHK:
9251 case BUILT_IN_STRCAT_CHK:
9252 case BUILT_IN_STRNCAT_CHK:
9253 case BUILT_IN_SNPRINTF_CHK:
9254 case BUILT_IN_VSNPRINTF_CHK:
9255 maybe_emit_chk_warning (exp, fcode);
9256 break;
9258 case BUILT_IN_SPRINTF_CHK:
9259 case BUILT_IN_VSPRINTF_CHK:
9260 maybe_emit_sprintf_chk_warning (exp, fcode);
9261 break;
9263 case BUILT_IN_FREE:
9264 if (warn_free_nonheap_object)
9265 maybe_emit_free_warning (exp);
9266 break;
9268 case BUILT_IN_THREAD_POINTER:
9269 return expand_builtin_thread_pointer (exp, target);
9271 case BUILT_IN_SET_THREAD_POINTER:
9272 expand_builtin_set_thread_pointer (exp);
9273 return const0_rtx;
9275 case BUILT_IN_ACC_ON_DEVICE:
9276 /* Do a library call if we failed to expand the builtin when
9277 folding. */
9278 break;
9280 case BUILT_IN_GOACC_PARLEVEL_ID:
9281 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9282 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9284 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9285 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9287 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9288 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9289 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9290 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9291 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9292 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9293 return expand_speculation_safe_value (mode, exp, target, ignore);
9295 default: /* Just do a library call for an unknown builtin. */
9296 break;
9299 /* The switch statement above can drop through to cause the function
9300 to be called normally. */
9301 return expand_call (exp, target, ignore);
9304 /* Determine whether a tree node represents a call to a built-in
9305 function. If the tree T is a call to a built-in function with
9306 the right number of arguments of the appropriate types, return
9307 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9308 Otherwise the return value is END_BUILTINS. */
9310 enum built_in_function
9311 builtin_mathfn_code (const_tree t)
9313 const_tree fndecl, arg, parmlist;
9314 const_tree argtype, parmtype;
9315 const_call_expr_arg_iterator iter;
9317 if (TREE_CODE (t) != CALL_EXPR)
9318 return END_BUILTINS;
9320 fndecl = get_callee_fndecl (t);
9321 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9322 return END_BUILTINS;
9324 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9325 init_const_call_expr_arg_iterator (t, &iter);
9326 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9328 /* If a function doesn't take a variable number of arguments,
9329 the last element in the list will have type `void'. */
9330 parmtype = TREE_VALUE (parmlist);
9331 if (VOID_TYPE_P (parmtype))
9333 if (more_const_call_expr_args_p (&iter))
9334 return END_BUILTINS;
9335 return DECL_FUNCTION_CODE (fndecl);
9338 if (! more_const_call_expr_args_p (&iter))
9339 return END_BUILTINS;
9341 arg = next_const_call_expr_arg (&iter);
9342 argtype = TREE_TYPE (arg);
9344 if (SCALAR_FLOAT_TYPE_P (parmtype))
9346 if (! SCALAR_FLOAT_TYPE_P (argtype))
9347 return END_BUILTINS;
9349 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9351 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9352 return END_BUILTINS;
9354 else if (POINTER_TYPE_P (parmtype))
9356 if (! POINTER_TYPE_P (argtype))
9357 return END_BUILTINS;
9359 else if (INTEGRAL_TYPE_P (parmtype))
9361 if (! INTEGRAL_TYPE_P (argtype))
9362 return END_BUILTINS;
9364 else
9365 return END_BUILTINS;
9368 /* Variable-length argument list. */
9369 return DECL_FUNCTION_CODE (fndecl);
9372 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9373 evaluate to a constant. */
9375 static tree
9376 fold_builtin_constant_p (tree arg)
9378 /* We return 1 for a numeric type that's known to be a constant
9379 value at compile-time or for an aggregate type that's a
9380 literal constant. */
9381 STRIP_NOPS (arg);
9383 /* If we know this is a constant, return the constant 1. */
9384 if (CONSTANT_CLASS_P (arg)
9385 || (TREE_CODE (arg) == CONSTRUCTOR
9386 && TREE_CONSTANT (arg)))
9387 return integer_one_node;
9388 if (TREE_CODE (arg) == ADDR_EXPR)
9390 tree op = TREE_OPERAND (arg, 0);
9391 if (TREE_CODE (op) == STRING_CST
9392 || (TREE_CODE (op) == ARRAY_REF
9393 && integer_zerop (TREE_OPERAND (op, 1))
9394 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9395 return integer_one_node;
9398 /* If this expression has side effects, show we don't know it to be a
9399 constant. Likewise if it's a pointer or aggregate type since in
9400 those cases we only want literals, since those are only optimized
9401 when generating RTL, not later.
9402 And finally, if we are compiling an initializer, not code, we
9403 need to return a definite result now; there's not going to be any
9404 more optimization done. */
9405 if (TREE_SIDE_EFFECTS (arg)
9406 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9407 || POINTER_TYPE_P (TREE_TYPE (arg))
9408 || cfun == 0
9409 || folding_initializer
9410 || force_folding_builtin_constant_p)
9411 return integer_zero_node;
9413 return NULL_TREE;
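/* Illustrative sketch (hypothetical user code) of the cases handled
   above:  */
static void
constant_p_example (int x)
{
  int a = __builtin_constant_p (42);   /* CONSTANT_CLASS_P: folds to 1 */
  int b = __builtin_constant_p ("hi"); /* ADDR_EXPR of a STRING_CST: 1 */
  int c = __builtin_constant_p (x);    /* unknown here; becomes 0 once no
					  further optimization is possible */
  (void) a; (void) b; (void) c;
}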
9416 /* Create a call to builtin_expect or builtin_expect_with_probability
9417 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9418 The Fortran FE can also produce builtin_expect with PREDICTOR as a third
9419 argument; builtin_expect_with_probability instead uses the third argument
9420 as the PROBABILITY value. */
9422 static tree
9423 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9424 tree predictor, tree probability)
9426 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9428 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9429 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9430 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9431 ret_type = TREE_TYPE (TREE_TYPE (fn));
9432 pred_type = TREE_VALUE (arg_types);
9433 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9435 pred = fold_convert_loc (loc, pred_type, pred);
9436 expected = fold_convert_loc (loc, expected_type, expected);
9438 if (probability)
9439 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9440 else
9441 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9442 predictor);
9444 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9445 build_int_cst (ret_type, 0));
9448 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9449 NULL_TREE if no simplification is possible. */
9451 tree
9452 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9453 tree arg3)
9455 tree inner, fndecl, inner_arg0;
9456 enum tree_code code;
9458 /* Distribute the expected value over short-circuiting operators.
9459 See through the cast from truthvalue_type_node to long. */
9460 inner_arg0 = arg0;
9461 while (CONVERT_EXPR_P (inner_arg0)
9462 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9463 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9464 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9466 /* If this is a builtin_expect within a builtin_expect, keep the
9467 inner one. See through a comparison against a constant, which
9468 might have been added to create a truthvalue. */
9469 inner = inner_arg0;
9471 if (COMPARISON_CLASS_P (inner)
9472 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9473 inner = TREE_OPERAND (inner, 0);
9475 if (TREE_CODE (inner) == CALL_EXPR
9476 && (fndecl = get_callee_fndecl (inner))
9477 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9478 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9479 return arg0;
9481 inner = inner_arg0;
9482 code = TREE_CODE (inner);
9483 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9485 tree op0 = TREE_OPERAND (inner, 0);
9486 tree op1 = TREE_OPERAND (inner, 1);
9487 arg1 = save_expr (arg1);
9489 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9490 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9491 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9493 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9496 /* If the argument isn't invariant then there's nothing else we can do. */
9497 if (!TREE_CONSTANT (inner_arg0))
9498 return NULL_TREE;
9500 /* If we expect that a comparison against the argument will fold to
9501 a constant return the constant. In practice, this means a true
9502 constant or the address of a non-weak symbol. */
9503 inner = inner_arg0;
9504 STRIP_NOPS (inner);
9505 if (TREE_CODE (inner) == ADDR_EXPR)
9509 inner = TREE_OPERAND (inner, 0);
9511 while (TREE_CODE (inner) == COMPONENT_REF
9512 || TREE_CODE (inner) == ARRAY_REF);
9513 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9514 return NULL_TREE;
9517 /* Otherwise, ARG0 already has the proper type for the return value. */
9518 return arg0;
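/* Illustrative sketch of the distribution above (hypothetical user code):
   an expectation on a short-circuit expression is pushed down onto both
   operands, so the two expressions below are folded alike.  */
static int
expect_example (int a, int b)
{
  long v1 = __builtin_expect (a && b, 1);
  long v2 = __builtin_expect (a, 1) && __builtin_expect (b, 1);
  return v1 == v2; /* always 1 */
}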
9521 /* Fold a call to __builtin_classify_type with argument ARG. */
9523 static tree
9524 fold_builtin_classify_type (tree arg)
9526 if (arg == 0)
9527 return build_int_cst (integer_type_node, no_type_class);
9529 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9532 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9533 ARG. */
9535 static tree
9536 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9538 if (!validate_arg (arg, POINTER_TYPE))
9539 return NULL_TREE;
9540 else
9542 c_strlen_data lendata = { };
9543 tree len = c_strlen (arg, 0, &lendata);
9545 if (len)
9546 return fold_convert_loc (loc, type, len);
9548 if (!lendata.decl)
9549 c_strlen (arg, 1, &lendata);
9551 if (lendata.decl)
9553 if (EXPR_HAS_LOCATION (arg))
9554 loc = EXPR_LOCATION (arg);
9555 else if (loc == UNKNOWN_LOCATION)
9556 loc = input_location;
9557 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9560 return NULL_TREE;
9564 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9566 static tree
9567 fold_builtin_inf (location_t loc, tree type, int warn)
9569 REAL_VALUE_TYPE real;
9571 /* __builtin_inff is intended to be usable to define INFINITY on all
9572 targets. If an infinity is not available, INFINITY expands "to a
9573 positive constant of type float that overflows at translation
9574 time", footnote "In this case, using INFINITY will violate the
9575 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9576 Thus we pedwarn to ensure this constraint violation is
9577 diagnosed. */
9578 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9579 pedwarn (loc, 0, "target format does not support infinity");
9581 real_inf (&real);
9582 return build_real (type, real);
9585 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9586 NULL_TREE if no simplification can be made. */
9588 static tree
9589 fold_builtin_sincos (location_t loc,
9590 tree arg0, tree arg1, tree arg2)
9592 tree type;
9593 tree fndecl, call = NULL_TREE;
9595 if (!validate_arg (arg0, REAL_TYPE)
9596 || !validate_arg (arg1, POINTER_TYPE)
9597 || !validate_arg (arg2, POINTER_TYPE))
9598 return NULL_TREE;
9600 type = TREE_TYPE (arg0);
9602 /* Calculate the result when the argument is a constant. */
9603 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9604 if (fn == END_BUILTINS)
9605 return NULL_TREE;
9607 /* Canonicalize sincos to cexpi. */
9608 if (TREE_CODE (arg0) == REAL_CST)
9610 tree complex_type = build_complex_type (type);
9611 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9613 if (!call)
9615 if (!targetm.libc_has_function (function_c99_math_complex)
9616 || !builtin_decl_implicit_p (fn))
9617 return NULL_TREE;
9618 fndecl = builtin_decl_explicit (fn);
9619 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9620 call = builtin_save_expr (call);
9623 tree ptype = build_pointer_type (type);
9624 arg1 = fold_convert (ptype, arg1);
9625 arg2 = fold_convert (ptype, arg2);
9626 return build2 (COMPOUND_EXPR, void_type_node,
9627 build2 (MODIFY_EXPR, void_type_node,
9628 build_fold_indirect_ref_loc (loc, arg1),
9629 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9630 build2 (MODIFY_EXPR, void_type_node,
9631 build_fold_indirect_ref_loc (loc, arg2),
9632 fold_build1_loc (loc, REALPART_EXPR, type, call)));
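/* Illustrative sketch of the canonicalization above (hypothetical user
   code): a sincos call is rewritten in terms of the internal cexpi
   builtin, which computes cos(x) + i*sin(x), so a single call yields
   both parts.  */
static void
sincos_example (double x, double *s, double *c)
{
  _Complex double t = __builtin_cexpi (x); /* cos(x) + i*sin(x) */
  *s = __imag__ t;
  *c = __real__ t;
}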
9635 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9636 Return NULL_TREE if no simplification can be made. */
9638 static tree
9639 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9641 if (!validate_arg (arg1, POINTER_TYPE)
9642 || !validate_arg (arg2, POINTER_TYPE)
9643 || !validate_arg (len, INTEGER_TYPE))
9644 return NULL_TREE;
9646 /* If the LEN parameter is zero, return zero. */
9647 if (integer_zerop (len))
9648 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9649 arg1, arg2);
9651 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9652 if (operand_equal_p (arg1, arg2, 0))
9653 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9655 /* If the LEN parameter is one, return an expression corresponding to
9656 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9657 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9659 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9660 tree cst_uchar_ptr_node
9661 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9663 tree ind1
9664 = fold_convert_loc (loc, integer_type_node,
9665 build1 (INDIRECT_REF, cst_uchar_node,
9666 fold_convert_loc (loc,
9667 cst_uchar_ptr_node,
9668 arg1)));
9669 tree ind2
9670 = fold_convert_loc (loc, integer_type_node,
9671 build1 (INDIRECT_REF, cst_uchar_node,
9672 fold_convert_loc (loc,
9673 cst_uchar_ptr_node,
9674 arg2)));
9675 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9678 return NULL_TREE;
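/* Illustrative sketch (hypothetical user code) of the LEN == 1 transform
   above: a one-byte memcmp reduces to a subtraction of unsigned chars.  */
static int
memcmp1_example (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}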
9681 /* Fold a call to builtin isascii with argument ARG. */
9683 static tree
9684 fold_builtin_isascii (location_t loc, tree arg)
9686 if (!validate_arg (arg, INTEGER_TYPE))
9687 return NULL_TREE;
9688 else
9690 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9691 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9692 build_int_cst (integer_type_node,
9693 ~ (unsigned HOST_WIDE_INT) 0x7f));
9694 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9695 arg, integer_zero_node);
9699 /* Fold a call to builtin toascii with argument ARG. */
9701 static tree
9702 fold_builtin_toascii (location_t loc, tree arg)
9704 if (!validate_arg (arg, INTEGER_TYPE))
9705 return NULL_TREE;
9707 /* Transform toascii(c) -> (c & 0x7f). */
9708 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9709 build_int_cst (integer_type_node, 0x7f));
9712 /* Fold a call to builtin isdigit with argument ARG. */
9714 static tree
9715 fold_builtin_isdigit (location_t loc, tree arg)
9717 if (!validate_arg (arg, INTEGER_TYPE))
9718 return NULL_TREE;
9719 else
9721 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9722 /* According to the C standard, isdigit is unaffected by locale.
9723 However, it definitely is affected by the target character set. */
9724 unsigned HOST_WIDE_INT target_digit0
9725 = lang_hooks.to_target_charset ('0');
9727 if (target_digit0 == 0)
9728 return NULL_TREE;
9730 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9731 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9732 build_int_cst (unsigned_type_node, target_digit0));
9733 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9734 build_int_cst (unsigned_type_node, 9));
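/* Illustrative sketch (hypothetical user code) of the three character
   transforms above, assuming '0' has its ASCII value on the target:  */
static void
ctype_example (int c)
{
  int isa = ((c & ~0x7f) == 0);        /* isascii (c) */
  int toa = (c & 0x7f);                /* toascii (c) */
  int isd = ((unsigned) c - '0' <= 9); /* isdigit (c) */
  (void) isa; (void) toa; (void) isd;
}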
9738 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9740 static tree
9741 fold_builtin_fabs (location_t loc, tree arg, tree type)
9743 if (!validate_arg (arg, REAL_TYPE))
9744 return NULL_TREE;
9746 arg = fold_convert_loc (loc, type, arg);
9747 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9750 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9752 static tree
9753 fold_builtin_abs (location_t loc, tree arg, tree type)
9755 if (!validate_arg (arg, INTEGER_TYPE))
9756 return NULL_TREE;
9758 arg = fold_convert_loc (loc, type, arg);
9759 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9762 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9764 static tree
9765 fold_builtin_carg (location_t loc, tree arg, tree type)
9767 if (validate_arg (arg, COMPLEX_TYPE)
9768 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9770 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9772 if (atan2_fn)
9774 tree new_arg = builtin_save_expr (arg);
9775 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9776 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9777 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9781 return NULL_TREE;
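/* Illustrative sketch (hypothetical user code): carg of a complex value
   is its phase angle, i.e. atan2 of the imaginary and real parts.  */
static double
carg_example (_Complex double z)
{
  return __builtin_atan2 (__imag__ z, __real__ z); /* == carg (z) */
}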
9784 /* Fold a call to builtin frexp, we can assume the base is 2. */
9786 static tree
9787 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9789 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9790 return NULL_TREE;
9792 STRIP_NOPS (arg0);
9794 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9795 return NULL_TREE;
9797 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9799 /* Proceed if a valid pointer type was passed in. */
9800 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9802 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9803 tree frac, exp;
9805 switch (value->cl)
9807 case rvc_zero:
9808 /* For +-0, return (*exp = 0, +-0). */
9809 exp = integer_zero_node;
9810 frac = arg0;
9811 break;
9812 case rvc_nan:
9813 case rvc_inf:
9814 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9815 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9816 case rvc_normal:
9818 /* Since the frexp function always expects base 2, and in
9819 GCC normalized significands are already in the range
9820 [0.5, 1.0), we have exactly what frexp wants. */
9821 REAL_VALUE_TYPE frac_rvt = *value;
9822 SET_REAL_EXP (&frac_rvt, 0);
9823 frac = build_real (rettype, frac_rvt);
9824 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9826 break;
9827 default:
9828 gcc_unreachable ();
9831 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9832 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9833 TREE_SIDE_EFFECTS (arg1) = 1;
9834 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9837 return NULL_TREE;
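/* Worked example of the folding above (hypothetical user code): 8.0 is
   0.5 * 2**4, and normalized significands already lie in [0.5, 1.0), so
   with a constant argument the call folds to the pair (*e = 4, 0.5).  */
static double
frexp_example (int *e)
{
  return __builtin_frexp (8.0, e); /* folds to (*e = 4, 0.5) */
}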
9840 /* Fold a call to builtin modf. */
9842 static tree
9843 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9845 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9846 return NULL_TREE;
9848 STRIP_NOPS (arg0);
9850 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9851 return NULL_TREE;
9853 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9855 /* Proceed if a valid pointer type was passed in. */
9856 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9858 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9859 REAL_VALUE_TYPE trunc, frac;
9861 switch (value->cl)
9863 case rvc_nan:
9864 case rvc_zero:
9865 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9866 trunc = frac = *value;
9867 break;
9868 case rvc_inf:
9869 /* For +-Inf, return (*arg1 = arg0, +-0). */
9870 frac = dconst0;
9871 frac.sign = value->sign;
9872 trunc = *value;
9873 break;
9874 case rvc_normal:
9875 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9876 real_trunc (&trunc, VOIDmode, value);
9877 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9878 /* If the original number was negative and already
9879 integral, then the fractional part is -0.0. */
9880 if (value->sign && frac.cl == rvc_zero)
9881 frac.sign = value->sign;
9882 break;
9885 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9886 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9887 build_real (rettype, trunc));
9888 TREE_SIDE_EFFECTS (arg1) = 1;
9889 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9890 build_real (rettype, frac));
9893 return NULL_TREE;
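/* Worked example of the folding above (hypothetical user code): modf
   splits a constant into integral and fractional parts, and an
   already-integral negative input yields a -0.0 fraction.  */
static void
modf_example (void)
{
  double ip;
  double f1 = __builtin_modf (2.5, &ip);  /* (*&ip = 2.0, 0.5) */
  double f2 = __builtin_modf (-2.0, &ip); /* (*&ip = -2.0, -0.0) */
  (void) f1; (void) f2;
}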
9896 /* Given a location LOC, an interclass builtin function decl FNDECL
9897 and its single argument ARG, return a folded expression computing
9898 the same, or NULL_TREE if we either couldn't or didn't want to fold
9899 (the latter happens if there's an RTL instruction available). */
9901 static tree
9902 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9904 machine_mode mode;
9906 if (!validate_arg (arg, REAL_TYPE))
9907 return NULL_TREE;
9909 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9910 return NULL_TREE;
9912 mode = TYPE_MODE (TREE_TYPE (arg));
9914 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9916 /* If there is no optab, try generic code. */
9917 switch (DECL_FUNCTION_CODE (fndecl))
9919 tree result;
9921 CASE_FLT_FN (BUILT_IN_ISINF):
9923 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9924 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9925 tree type = TREE_TYPE (arg);
9926 REAL_VALUE_TYPE r;
9927 char buf[128];
9929 if (is_ibm_extended)
9931 /* NaN and Inf are encoded in the high-order double value
9932 only. The low-order value is not significant. */
9933 type = double_type_node;
9934 mode = DFmode;
9935 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9937 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9938 real_from_string (&r, buf);
9939 result = build_call_expr (isgr_fn, 2,
9940 fold_build1_loc (loc, ABS_EXPR, type, arg),
9941 build_real (type, r));
9942 return result;
9944 CASE_FLT_FN (BUILT_IN_FINITE):
9945 case BUILT_IN_ISFINITE:
9947 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9948 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9949 tree type = TREE_TYPE (arg);
9950 REAL_VALUE_TYPE r;
9951 char buf[128];
9953 if (is_ibm_extended)
9955 /* NaN and Inf are encoded in the high-order double value
9956 only. The low-order value is not significant. */
9957 type = double_type_node;
9958 mode = DFmode;
9959 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9961 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9962 real_from_string (&r, buf);
9963 result = build_call_expr (isle_fn, 2,
9964 fold_build1_loc (loc, ABS_EXPR, type, arg),
9965 build_real (type, r));
9966 /*result = fold_build2_loc (loc, UNGT_EXPR,
9967 TREE_TYPE (TREE_TYPE (fndecl)),
9968 fold_build1_loc (loc, ABS_EXPR, type, arg),
9969 build_real (type, r));
9970 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9971 TREE_TYPE (TREE_TYPE (fndecl)),
9972 result);*/
9973 return result;
9975 case BUILT_IN_ISNORMAL:
9977 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9978 islessequal(fabs(x),DBL_MAX). */
9979 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9980 tree type = TREE_TYPE (arg);
9981 tree orig_arg, max_exp, min_exp;
9982 machine_mode orig_mode = mode;
9983 REAL_VALUE_TYPE rmax, rmin;
9984 char buf[128];
9986 orig_arg = arg = builtin_save_expr (arg);
9987 if (is_ibm_extended)
9989 /* Use double to test the normal range of IBM extended
9990 precision. Emin for IBM extended precision is
9991 different to emin for IEEE double, being 53 higher
9992 since the low double exponent is at least 53 lower
9993 than the high double exponent. */
9994 type = double_type_node;
9995 mode = DFmode;
9996 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9998 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
10000 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10001 real_from_string (&rmax, buf);
10002 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
10003 real_from_string (&rmin, buf);
10004 max_exp = build_real (type, rmax);
10005 min_exp = build_real (type, rmin);
10007 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
10008 if (is_ibm_extended)
10010 /* Testing the high end of the range is done just using
10011 the high double, using the same test as isfinite().
10012 For the subnormal end of the range we first test the
10013 high double, then if its magnitude is equal to the
10014 limit of 0x1p-969, we test whether the low double is
10015 non-zero and opposite sign to the high double. */
10016 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
10017 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10018 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
10019 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
10020 arg, min_exp);
10021 tree as_complex = build1 (VIEW_CONVERT_EXPR,
10022 complex_double_type_node, orig_arg);
10023 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
10024 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
10025 tree zero = build_real (type, dconst0);
10026 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
10027 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
10028 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
10029 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
10030 fold_build3 (COND_EXPR,
10031 integer_type_node,
10032 hilt, logt, lolt));
10033 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
10034 eq_min, ok_lo);
10035 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
10036 gt_min, eq_min);
10038 else
10040 tree const isge_fn
10041 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10042 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
10044 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
10045 max_exp, min_exp);
10046 return result;
10048 default:
10049 break;
10052 return NULL_TREE;
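/* Illustrative sketch of the generic expansions above for the common
   (non-IBM-extended) double case, at the user level (hypothetical code;
   1.7976931348623157e+308 is DBL_MAX):  */
static void
interclass_example (double x)
{
  /* isinf (x) -> isgreater (fabs (x), DBL_MAX).  */
  int inf = __builtin_isgreater (__builtin_fabs (x), 1.7976931348623157e+308);
  /* isfinite (x) -> islessequal (fabs (x), DBL_MAX).  */
  int fin = __builtin_islessequal (__builtin_fabs (x), 1.7976931348623157e+308);
  (void) inf; (void) fin;
}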
10055 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10056 ARG is the argument for the call. */
10058 static tree
10059 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10061 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10063 if (!validate_arg (arg, REAL_TYPE))
10064 return NULL_TREE;
10066 switch (builtin_index)
10068 case BUILT_IN_ISINF:
10069 if (!HONOR_INFINITIES (arg))
10070 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10072 return NULL_TREE;
10074 case BUILT_IN_ISINF_SIGN:
10076 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10077 /* In a boolean context, GCC will fold the inner COND_EXPR to
10078 1. So e.g. "if (isinf_sign(x))" would be folded to just
10079 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10080 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
10081 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10082 tree tmp = NULL_TREE;
10084 arg = builtin_save_expr (arg);
10086 if (signbit_fn && isinf_fn)
10088 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10089 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10091 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10092 signbit_call, integer_zero_node);
10093 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10094 isinf_call, integer_zero_node);
10096 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10097 integer_minus_one_node, integer_one_node);
10098 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10099 isinf_call, tmp,
10100 integer_zero_node);
10103 return tmp;
10106 case BUILT_IN_ISFINITE:
10107 if (!HONOR_NANS (arg)
10108 && !HONOR_INFINITIES (arg))
10109 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10111 return NULL_TREE;
10113 case BUILT_IN_ISNAN:
10114 if (!HONOR_NANS (arg))
10115 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10118 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
10119 if (is_ibm_extended)
10121 /* NaN and Inf are encoded in the high-order double value
10122 only. The low-order value is not significant. */
10123 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
10126 arg = builtin_save_expr (arg);
10127 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10129 default:
10130 gcc_unreachable ();
10134 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10135 This builtin will generate code to return the appropriate floating
10136 point classification depending on the value of the floating point
10137 number passed in. The possible return values must be supplied as
10138 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10139 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
10140 one floating point argument which is "type generic". */
10142 static tree
10143 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
10145 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10146 arg, type, res, tmp;
10147 machine_mode mode;
10148 REAL_VALUE_TYPE r;
10149 char buf[128];
10151 /* Verify the required arguments in the original call. */
10152 if (nargs != 6
10153 || !validate_arg (args[0], INTEGER_TYPE)
10154 || !validate_arg (args[1], INTEGER_TYPE)
10155 || !validate_arg (args[2], INTEGER_TYPE)
10156 || !validate_arg (args[3], INTEGER_TYPE)
10157 || !validate_arg (args[4], INTEGER_TYPE)
10158 || !validate_arg (args[5], REAL_TYPE))
10159 return NULL_TREE;
10161 fp_nan = args[0];
10162 fp_infinite = args[1];
10163 fp_normal = args[2];
10164 fp_subnormal = args[3];
10165 fp_zero = args[4];
10166 arg = args[5];
10167 type = TREE_TYPE (arg);
10168 mode = TYPE_MODE (type);
10169 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10171 /* fpclassify(x) ->
10172 isnan(x) ? FP_NAN :
10173 (fabs(x) == Inf ? FP_INFINITE :
10174 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10175 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10177 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10178 build_real (type, dconst0));
10179 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10180 tmp, fp_zero, fp_subnormal);
10182 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10183 real_from_string (&r, buf);
10184 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10185 arg, build_real (type, r));
10186 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10188 if (HONOR_INFINITIES (mode))
10190 real_inf (&r);
10191 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10192 build_real (type, r));
10193 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10194 fp_infinite, res);
10197 if (HONOR_NANS (mode))
10199 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10200 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10203 return res;
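/* Illustrative sketch (hypothetical user code): the five classification
   values come first, in the order documented above, followed by the one
   type-generic floating point argument.  Any int values may be passed;
   the ones below are only examples.  */
static int
fpclassify_example (double x)
{
  return __builtin_fpclassify (/* FP_NAN */ 0, /* FP_INFINITE */ 1,
			       /* FP_NORMAL */ 4, /* FP_SUBNORMAL */ 3,
			       /* FP_ZERO */ 2, x);
}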
10206 /* Fold a call to an unordered comparison function such as
10207 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10208 being called and ARG0 and ARG1 are the arguments for the call.
10209 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10210 the opposite of the desired result. UNORDERED_CODE is used
10211 for modes that can hold NaNs and ORDERED_CODE is used for
10212 the rest. */
10214 static tree
10215 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10216 enum tree_code unordered_code,
10217 enum tree_code ordered_code)
10219 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10220 enum tree_code code;
10221 tree type0, type1;
10222 enum tree_code code0, code1;
10223 tree cmp_type = NULL_TREE;
10225 type0 = TREE_TYPE (arg0);
10226 type1 = TREE_TYPE (arg1);
10228 code0 = TREE_CODE (type0);
10229 code1 = TREE_CODE (type1);
10231 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10232 /* Choose the wider of two real types. */
10233 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10234 ? type0 : type1;
10235 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10236 cmp_type = type0;
10237 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10238 cmp_type = type1;
10240 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10241 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10243 if (unordered_code == UNORDERED_EXPR)
10245 if (!HONOR_NANS (arg0))
10246 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10247 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10250 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
10251 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10252 fold_build2_loc (loc, code, type, arg0, arg1));
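/* Illustrative sketch (hypothetical user code): with NaNs honored,
   isgreater (x, y) folds to !UNLE (x, y), i.e. true exactly when the
   operands are ordered and x > y, which the expression below spells in
   user-level terms.  */
static int
isgreater_example (double x, double y)
{
  return !__builtin_isunordered (x, y) && !__builtin_islessequal (x, y);
}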
10255 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10256 arithmetic if it can never overflow, or into internal functions that
10257 return both the result of the arithmetic and an overflow boolean flag in
10258 a complex integer result, or into some other check for overflow.
10259 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10260 checking part of that. */
10262 static tree
10263 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10264 tree arg0, tree arg1, tree arg2)
10266 enum internal_fn ifn = IFN_LAST;
10267 /* The code of the expression corresponding to the built-in. */
10268 enum tree_code opcode = ERROR_MARK;
10269 bool ovf_only = false;
10271 switch (fcode)
10273 case BUILT_IN_ADD_OVERFLOW_P:
10274 ovf_only = true;
10275 /* FALLTHRU */
10276 case BUILT_IN_ADD_OVERFLOW:
10277 case BUILT_IN_SADD_OVERFLOW:
10278 case BUILT_IN_SADDL_OVERFLOW:
10279 case BUILT_IN_SADDLL_OVERFLOW:
10280 case BUILT_IN_UADD_OVERFLOW:
10281 case BUILT_IN_UADDL_OVERFLOW:
10282 case BUILT_IN_UADDLL_OVERFLOW:
10283 opcode = PLUS_EXPR;
10284 ifn = IFN_ADD_OVERFLOW;
10285 break;
10286 case BUILT_IN_SUB_OVERFLOW_P:
10287 ovf_only = true;
10288 /* FALLTHRU */
10289 case BUILT_IN_SUB_OVERFLOW:
10290 case BUILT_IN_SSUB_OVERFLOW:
10291 case BUILT_IN_SSUBL_OVERFLOW:
10292 case BUILT_IN_SSUBLL_OVERFLOW:
10293 case BUILT_IN_USUB_OVERFLOW:
10294 case BUILT_IN_USUBL_OVERFLOW:
10295 case BUILT_IN_USUBLL_OVERFLOW:
10296 opcode = MINUS_EXPR;
10297 ifn = IFN_SUB_OVERFLOW;
10298 break;
10299 case BUILT_IN_MUL_OVERFLOW_P:
10300 ovf_only = true;
10301 /* FALLTHRU */
10302 case BUILT_IN_MUL_OVERFLOW:
10303 case BUILT_IN_SMUL_OVERFLOW:
10304 case BUILT_IN_SMULL_OVERFLOW:
10305 case BUILT_IN_SMULLL_OVERFLOW:
10306 case BUILT_IN_UMUL_OVERFLOW:
10307 case BUILT_IN_UMULL_OVERFLOW:
10308 case BUILT_IN_UMULLL_OVERFLOW:
10309 opcode = MULT_EXPR;
10310 ifn = IFN_MUL_OVERFLOW;
10311 break;
10312 default:
10313 gcc_unreachable ();
10316 /* For the "generic" overloads, the first two arguments can have different
10317 types and the last argument determines the target type to use to check
10318 for overflow. The arguments of the other overloads all have the same
10319 type. */
10320 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10322 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10323 arguments are constant, attempt to fold the built-in call into a constant
10324 expression indicating whether or not it detected an overflow. */
10325 if (ovf_only
10326 && TREE_CODE (arg0) == INTEGER_CST
10327 && TREE_CODE (arg1) == INTEGER_CST)
10328 /* Perform the computation in the target type and check for overflow. */
10329 return omit_one_operand_loc (loc, boolean_type_node,
10330 arith_overflowed_p (opcode, type, arg0, arg1)
10331 ? boolean_true_node : boolean_false_node,
10332 arg2);
10334 tree intres, ovfres;
10335 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10337 intres = fold_binary_loc (loc, opcode, type,
10338 fold_convert_loc (loc, type, arg0),
10339 fold_convert_loc (loc, type, arg1));
10340 if (TREE_OVERFLOW (intres))
10341 intres = drop_tree_overflow (intres);
10342 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10343 ? boolean_true_node : boolean_false_node);
10345 else
10347 tree ctype = build_complex_type (type);
10348 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10349 arg0, arg1);
10350 tree tgt = save_expr (call);
10351 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10352 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10353 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10356 if (ovf_only)
10357 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10359 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10360 tree store
10361 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10362 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
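/* Illustrative sketch (hypothetical user code) of the two flavours folded
   above: the plain overflow builtins also store the wrapped result,
   while the _p variants only report whether overflow would occur in the
   type of their third argument.  */
static int
overflow_example (int a, int b)
{
  int sum;
  if (__builtin_add_overflow (a, b, &sum))
    return -1; /* overflowed; sum holds the wrapped value */
  if (__builtin_mul_overflow_p (a, b, (long) 0))
    return -2; /* a * b would not fit in a long */
  return sum;
}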
10365 /* Fold a call to __builtin_FILE to a constant string. */
10367 static inline tree
10368 fold_builtin_FILE (location_t loc)
10370 if (const char *fname = LOCATION_FILE (loc))
10372 /* The documentation says this builtin is equivalent to the preprocessor
10373 __FILE__ macro so it appears appropriate to use the same file prefix
10374 mappings. */
10375 fname = remap_macro_filename (fname);
10376 return build_string_literal (strlen (fname) + 1, fname);
10379 return build_string_literal (1, "");
10382 /* Fold a call to __builtin_FUNCTION to a constant string. */
10384 static inline tree
10385 fold_builtin_FUNCTION ()
10387 const char *name = "";
10389 if (current_function_decl)
10390 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10392 return build_string_literal (strlen (name) + 1, name);
10395 /* Fold a call to __builtin_LINE to an integer constant. */
10397 static inline tree
10398 fold_builtin_LINE (location_t loc, tree type)
10400 return build_int_cst (type, LOCATION_LINE (loc));
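/* Illustrative sketch (hypothetical user code): the three builtins fold
   to constants describing the call site, much like __FILE__, __func__
   and __LINE__.  */
static const char *
where_example (int *line)
{
  *line = __builtin_LINE ();    /* integer constant */
  (void) __builtin_FUNCTION (); /* "where_example" */
  return __builtin_FILE ();     /* possibly prefix-remapped file name */
}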
10403 /* Fold a call to built-in function FNDECL with 0 arguments.
10404 This function returns NULL_TREE if no simplification was possible. */
10406 static tree
10407 fold_builtin_0 (location_t loc, tree fndecl)
10409 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10410 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10411 switch (fcode)
10413 case BUILT_IN_FILE:
10414 return fold_builtin_FILE (loc);
10416 case BUILT_IN_FUNCTION:
10417 return fold_builtin_FUNCTION ();
10419 case BUILT_IN_LINE:
10420 return fold_builtin_LINE (loc, type);
10422 CASE_FLT_FN (BUILT_IN_INF):
10423 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10424 case BUILT_IN_INFD32:
10425 case BUILT_IN_INFD64:
10426 case BUILT_IN_INFD128:
10427 return fold_builtin_inf (loc, type, true);
10429 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10430 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10431 return fold_builtin_inf (loc, type, false);
10433 case BUILT_IN_CLASSIFY_TYPE:
10434 return fold_builtin_classify_type (NULL_TREE);
10436 default:
10437 break;
10439 return NULL_TREE;
10442 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10443 This function returns NULL_TREE if no simplification was possible. */
10445 static tree
10446 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10448 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10449 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10451 if (TREE_CODE (arg0) == ERROR_MARK)
10452 return NULL_TREE;
10454 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10455 return ret;
10457 switch (fcode)
10459 case BUILT_IN_CONSTANT_P:
10461 tree val = fold_builtin_constant_p (arg0);
10463 /* Gimplification will pull the CALL_EXPR for the builtin out of
10464 an if condition. When not optimizing, we'll not CSE it back.
10465 To avoid regressions such as link errors, return false now. */
10466 if (!val && !optimize)
10467 val = integer_zero_node;
10469 return val;
10472 case BUILT_IN_CLASSIFY_TYPE:
10473 return fold_builtin_classify_type (arg0);
10475 case BUILT_IN_STRLEN:
10476 return fold_builtin_strlen (loc, expr, type, arg0);
10478 CASE_FLT_FN (BUILT_IN_FABS):
10479 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10480 case BUILT_IN_FABSD32:
10481 case BUILT_IN_FABSD64:
10482 case BUILT_IN_FABSD128:
10483 return fold_builtin_fabs (loc, arg0, type);
10485 case BUILT_IN_ABS:
10486 case BUILT_IN_LABS:
10487 case BUILT_IN_LLABS:
10488 case BUILT_IN_IMAXABS:
10489 return fold_builtin_abs (loc, arg0, type);
10491 CASE_FLT_FN (BUILT_IN_CONJ):
10492 if (validate_arg (arg0, COMPLEX_TYPE)
10493 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10494 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10495 break;
10497 CASE_FLT_FN (BUILT_IN_CREAL):
10498 if (validate_arg (arg0, COMPLEX_TYPE)
10499 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10500 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10501 break;
10503 CASE_FLT_FN (BUILT_IN_CIMAG):
10504 if (validate_arg (arg0, COMPLEX_TYPE)
10505 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10506 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10507 break;
10509 CASE_FLT_FN (BUILT_IN_CARG):
10510 return fold_builtin_carg (loc, arg0, type);
10512 case BUILT_IN_ISASCII:
10513 return fold_builtin_isascii (loc, arg0);
10515 case BUILT_IN_TOASCII:
10516 return fold_builtin_toascii (loc, arg0);
10518 case BUILT_IN_ISDIGIT:
10519 return fold_builtin_isdigit (loc, arg0);
10521 CASE_FLT_FN (BUILT_IN_FINITE):
10522 case BUILT_IN_FINITED32:
10523 case BUILT_IN_FINITED64:
10524 case BUILT_IN_FINITED128:
10525 case BUILT_IN_ISFINITE:
10527 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10528 if (ret)
10529 return ret;
10530 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10533 CASE_FLT_FN (BUILT_IN_ISINF):
10534 case BUILT_IN_ISINFD32:
10535 case BUILT_IN_ISINFD64:
10536 case BUILT_IN_ISINFD128:
10538 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10539 if (ret)
10540 return ret;
10541 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10544 case BUILT_IN_ISNORMAL:
10545 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10547 case BUILT_IN_ISINF_SIGN:
10548 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10550 CASE_FLT_FN (BUILT_IN_ISNAN):
10551 case BUILT_IN_ISNAND32:
10552 case BUILT_IN_ISNAND64:
10553 case BUILT_IN_ISNAND128:
10554 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10556 case BUILT_IN_FREE:
10557 if (integer_zerop (arg0))
10558 return build_empty_stmt (loc);
10559 break;
10561 default:
10562 break;
10565 return NULL_TREE;
10569 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10570 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10571 if no simplification was possible. */
10573 static tree
10574 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10576 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10577 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10579 if (TREE_CODE (arg0) == ERROR_MARK
10580 || TREE_CODE (arg1) == ERROR_MARK)
10581 return NULL_TREE;
10583 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10584 return ret;
10586 switch (fcode)
10588 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10589 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10590 if (validate_arg (arg0, REAL_TYPE)
10591 && validate_arg (arg1, POINTER_TYPE))
10592 return do_mpfr_lgamma_r (arg0, arg1, type);
10593 break;
10595 CASE_FLT_FN (BUILT_IN_FREXP):
10596 return fold_builtin_frexp (loc, arg0, arg1, type);
10598 CASE_FLT_FN (BUILT_IN_MODF):
10599 return fold_builtin_modf (loc, arg0, arg1, type);
10601 case BUILT_IN_STRSPN:
10602 return fold_builtin_strspn (loc, expr, arg0, arg1);
10604 case BUILT_IN_STRCSPN:
10605 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10607 case BUILT_IN_STRPBRK:
10608 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10610 case BUILT_IN_EXPECT:
10611 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10613 case BUILT_IN_ISGREATER:
10614 return fold_builtin_unordered_cmp (loc, fndecl,
10615 arg0, arg1, UNLE_EXPR, LE_EXPR);
10616 case BUILT_IN_ISGREATEREQUAL:
10617 return fold_builtin_unordered_cmp (loc, fndecl,
10618 arg0, arg1, UNLT_EXPR, LT_EXPR);
10619 case BUILT_IN_ISLESS:
10620 return fold_builtin_unordered_cmp (loc, fndecl,
10621 arg0, arg1, UNGE_EXPR, GE_EXPR);
10622 case BUILT_IN_ISLESSEQUAL:
10623 return fold_builtin_unordered_cmp (loc, fndecl,
10624 arg0, arg1, UNGT_EXPR, GT_EXPR);
10625 case BUILT_IN_ISLESSGREATER:
10626 return fold_builtin_unordered_cmp (loc, fndecl,
10627 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10628 case BUILT_IN_ISUNORDERED:
10629 return fold_builtin_unordered_cmp (loc, fndecl,
10630 arg0, arg1, UNORDERED_EXPR,
10631 NOP_EXPR);
10633 /* We do the folding for va_start in the expander. */
10634 case BUILT_IN_VA_START:
10635 break;
10637 case BUILT_IN_OBJECT_SIZE:
10638 return fold_builtin_object_size (arg0, arg1);
10640 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10641 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10643 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10644 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10646 default:
10647 break;
10649 return NULL_TREE;

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
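
/* Illustrative sketch (not part of the original source): with constant
   operands fold_builtin_arith_overflow can reduce the call to a
   constant, e.g.

     __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0)   ~>   1

   because the infinite-precision sum INT_MAX + 1 does not fit in int.  */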

/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
		int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
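
/* Worked example (illustrative, not part of the original source):
   with OLDNARGS = 4, SKIP = 2 and N = 1, NARGS = 4 - 2 + 1 = 3 and the
   new argument vector is laid out as

     buffer[0] = <first arg from NEWARGS>
     buffer[1] = args[2]
     buffer[2] = args[3]

   i.e. the N new arguments are prepended to the old list minus its
   first SKIP entries.  */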

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an always_inline inline-attribute
   wrapper, defer folding it until after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
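
/* Illustrative sketch (an assumed glibc-style wrapper, not part of this
   file): with -D_FORTIFY_SOURCE the C library may declare

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before this wrapper has been inlined would
   bypass the object-size check, hence the deferral above.  */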

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead the last argument is __builtin_va_arg_pack ().  Defer
	 folding even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If the last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when the argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
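
/* Usage sketch (illustrative, not part of the original source): a
   memcpy-shaped call would be validated with

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return false;

   while ending the list with 0 instead of VOID_TYPE accepts any extra
   trailing arguments.  */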

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Return true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  tree fn;
  const char *p1, *p2;

  p2 = c_getstr (s2);
  if (p2 == NULL)
    return NULL_TREE;

  p1 = c_getstr (s1);
  if (p1 != NULL)
    {
      const char *r = strpbrk (p1, p2);
      tree tem;

      if (r == NULL)
	return build_int_cst (TREE_TYPE (s1), 0);

      /* Return an offset into the constant string argument.  */
      tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
      return fold_convert_loc (loc, type, tem);
    }

  if (p2[0] == '\0')
    /* strpbrk(x, "") == NULL.
       Evaluate and ignore s1 in case it had side-effects.  */
    return omit_one_operand_loc (loc, type, integer_zero_node, s1);

  if (p2[1] != '\0')
    return NULL_TREE;  /* Really call strpbrk.  */

  fn = builtin_decl_implicit (BUILT_IN_STRCHR);
  if (!fn)
    return NULL_TREE;

  /* New argument list transforming strpbrk(s1, s2) to
     strchr(s1, s2[0]).  */
  return build_call_expr_loc (loc, fn, 2, s1,
			      build_int_cst (integer_type_node, p2[0]));
}
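
/* Illustrative transformations (sketch, not part of the original
   source):

     strpbrk (s, "")          ~>   (char *) 0, with s still evaluated
     strpbrk (s, "c")         ~>   strchr (s, 'c')
     strpbrk ("hello", "lo")  ~>   "hello" + 2

   A multi-character constant S2 with unknown S1 is left as a real call
   to strpbrk.  */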

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

  /* If either argument is "", the result is 0.  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    /* Evaluate and ignore both arguments in case either one has
       side-effects.  */
    return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				  s1, s2);
  return NULL_TREE;
}
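
/* Illustrative folds (sketch, not part of the original source):

     strspn (s, "")   ~>   (size_t) 0
     strspn ("", s)   ~>   (size_t) 0

   with both arguments still evaluated for side effects via
   omit_two_operands_loc.  */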

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  /* If the first argument is "", the result is 0.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
	 side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
				   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
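
/* Illustrative folds (sketch, not part of the original source):

     strcspn ("", s)   ~>   (size_t) 0, with s still evaluated
     strcspn (s, "")   ~>   strlen (s)

   since every character of S1 lies outside an empty reject set.  */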

/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  location_t current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed arguments");
      return true;
    }

  if (va_start_p)
    {
      if (nargs != 2)
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something other
	     than the last argument even though the user wrote the last
	     argument.  We just warn and let the argument through, so
	     wrong code may be generated because of it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behavior when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}

/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
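
/* Illustrative sketch (not part of the original source): any call with
   a computable size has already been folded at the tree level, so only
   the "unknown object" fallback is expanded here:

     __builtin_object_size (p, 0)   ~>   (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2)   ~>   (size_t) 0    (minimum estimate)  */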

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* FIXME: Set access mode to write only for memset et al.  */
  bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
				/*srcstr=*/NULL_TREE, size, access_read_write);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (!fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
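
/* Illustrative sketch (not part of the original source): when the
   length is known to fit in the known object size, the checking call
   is lowered to the plain primitive, e.g.

     __builtin___memcpy_chk (d, s, 16, 32)   ~>   memcpy (d, s, 16)

   whereas a detected overflow (LEN > SIZE) deliberately keeps the _chk
   call so the runtime check still fires.  */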

/* Emit a warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object returned by __builtin_object_size.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;
  /* The access by the function that's checked.  Except for snprintf
     both writing and reading are checked.  */
  access_mode mode = access_read_write;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      /* The only checked access is the write to the destination.  */
      mode = access_write_only;
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_access (exp, size, maxread, srcstr, objsize, mode);
}

/* Emit a warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
		access_write_only);
}

/* Emit a warning if free is called with the address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  if (call_expr_nargs (exp) != 1)
    return;

  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
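
/* Illustrative folds (sketch, not part of the original source):

     char buf[32];
     __builtin_object_size (buf, 0)    ~>   (size_t) 32
     __builtin_object_size (f (), 0)   ~>   (size_t) -1  [side effects]
     __builtin_object_size (f (), 2)   ~>   (size_t) 0

   An SSA_NAME pointer whose size is not yet known is left unfolded so
   later passes can retry.  */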

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, and ARGS is an array of its NARGS arguments.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
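
/* Illustrative constant fold (sketch, not part of the original
   source):

     int q;
     double r = remquo (5.0, 3.0, &q);

   folds to the COMPOUND_EXPR (q = 2, -1.0): 5.0/3.0 rounds to the
   nearest integer 2, so the remainder is 5.0 - 2*3.0 = -1.0, and the
   low bits of the quotient are stored through the pointer.  */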

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
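
/* Illustrative constant fold (sketch, not part of the original
   source):

     int sg;
     double l = lgamma_r (2.0, &sg);

   folds to (sg = 1, 0.0), since gamma(2) = 1: the log of its absolute
   value is 0.0 and its sign, stored through &sg, is 1.  */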

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }
  return result;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
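
/* Illustrative trigger (sketch, not part of the original source): a
   user redeclaration such as

     extern int ffs (int) __asm__ ("custom_ffs");

   routes the builtin, and on targets where int is narrower than a
   word also the ffs_optab libcall, to the "custom_ffs" symbol.  */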

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee it is not (thus the
   list of handled builtins below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
      }
  return false;
}