/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
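
/* For example, if get_object_alignment_1 reports align == 32 and
   bitpos == 8, the address is known to sit 8 bits past a 32-bit
   boundary, so the best alignment that actually holds is
   least_bit_hwi (8) == 8 bits, i.e. plain byte alignment.  Only a
   bitpos of zero preserves the full reported alignment.  */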
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
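
/* For example, with ELTSIZE == 2 the bytes {'a', 0, 'b', 0, 0, 0}
   viewed as three 2-byte elements hold the wide string "ab", and
   string_length (buf, 2, 3) returns 2: the third element is the
   all-zero terminator caught by the memcmp against "\0\0\0\0".  */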
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element.  Otherwise return null.  */

tree
unterminated_array (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (exp);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == ARRAY_REF)
	; /* The address of an array element is acceptable as-is.  */
      else if (code != POINTER_PLUS_EXPR)
	return NULL_TREE;

      exp = rhs1;
    }

  tree nonstr = NULL;
  if (c_strlen (exp, 1, &nonstr, 1) == NULL && nonstr)
    return nonstr;

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   If a non-zero-terminated string value is encountered and NONSTR is
   non-null, the declaration of the string value is assigned to *NONSTR.
   *NONSTR is accumulating, thus not cleared on success, therefore it has
   to be initialized to NULL_TREE by the caller.

   ELTSIZE is 1 for normal single-byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
tree
c_strlen (tree src, int only_value, tree *nonstr, unsigned eltsize)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, nonstr, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, nonstr, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, nonstr, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  if (nonstr && decl)
	    *nonstr = decl;
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (len), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.  */
  if (len >= maxelts - eltoff)
    {
      if (nonstr && decl)
	*nonstr = decl;
      return NULL_TREE;
    }

  return ssize_int (len);
}
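
/* As a worked example, for the literal "foo\0bar" c_strlen returns 3
   with a zero offset and 1 with a constant offset of 6 (the tail "r"),
   but NULL_TREE when the offset is non-constant: an unknown starting
   point on either side of the embedded NUL makes the result ambiguous,
   which is exactly the len + 1 < strelts early return above.  */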
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
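
/* For example, reading "abcd" in a 32-bit integer mode yields the
   constant 0x64636261 on a little-endian target ('a' in the least
   significant byte) and 0x61626364 on a big-endian one.  Bytes past
   the string's terminating NUL come out as zero, because CH latches
   at 0 once a NUL has been seen.  */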
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
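
/* The jmp_buf written above can be pictured as follows (a sketch; the
   exact save-area contents are machine-dependent):

     buf[0]             frame value (targetm.builtin_setjmp_frame_value)
     buf[1]             address of RECEIVER_LABEL
     buf[2] .. buf[4]   stack save area in SAVE_NONLOCAL mode

   expand_builtin_longjmp below reads FP, label, and SP back from the
   same three offsets.  */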
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if more arguments remain in the iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
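
/* As a usage sketch (mirroring the calls later in this file), a
   two-pointer builtin such as __builtin_nonlocal_goto is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which also rejects literal null pointers for arguments covered by
   attribute nonnull, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts anything after the first argument, as __builtin_prefetch
   with its optional arguments needs.  */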
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
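
/* At the source level this corresponds to calls such as

     __builtin_prefetch (p);        // read prefetch, locality 3
     __builtin_prefetch (p, 1, 0);  // write prefetch, no temporal locality

   where the second and third arguments must be compile-time constants,
   matching the INTEGER_CST checks above.  */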
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
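
/* As a hypothetical illustration (register counts and sizes are
   target-specific): with an 8-byte Pmode, no struct-value register,
   and argument registers using 8-byte and 16-byte modes, the block
   starts with the 8-byte incoming arg-pointer slot, then appends each
   register's slot rounded up to its mode alignment, which is the
   layout __builtin_apply_args fills in below.  */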
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
1777 because prepare_call_address already did it if it should be done. */
1778 if (GET_CODE (function) != SYMBOL_REF)
1779 function = memory_address (FUNCTION_MODE, function);
1781 /* Generate the actual call instruction and save the return value. */
1782 if (targetm.have_untyped_call ())
1784 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1785 emit_call_insn (targetm.gen_untyped_call (mem, result,
1786 result_vector (1, result)));
1788 else if (targetm.have_call_value ())
1790 rtx valreg = 0;
1792 /* Locate the unique return register. It is not possible to
1793 express a call that sets more than one return register using
1794 call_value; use untyped_call for that. In fact, untyped_call
1795 only needs to save the return registers in the given block. */
1796 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1797 if ((mode = apply_result_mode[regno]) != VOIDmode)
1799 gcc_assert (!valreg); /* have_untyped_call required. */
1801 valreg = gen_rtx_REG (mode, regno);
1804 emit_insn (targetm.gen_call_value (valreg,
1805 gen_rtx_MEM (FUNCTION_MODE, function),
1806 const0_rtx, NULL_RTX, const0_rtx));
1808 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1810 else
1811 gcc_unreachable ();
1813 /* Find the CALL insn we just emitted, and attach the register usage
1814 information. */
1815 call_insn = last_call_insn ();
1816 add_function_usage_to (call_insn, call_fusage);
1818 /* Restore the stack. */
1819 if (targetm.have_save_stack_nonlocal ())
1820 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1821 else
1822 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1823 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1825 OK_DEFER_POP;
1827 /* Return the address of the result block. */
1828 result = copy_addr_to_reg (XEXP (result, 0));
1829 return convert_memory_address (ptr_mode, result);
1832 /* Perform an untyped return. */
1834 static void
1835 expand_builtin_return (rtx result)
1837 int size, align, regno;
1838 fixed_size_mode mode;
1839 rtx reg;
1840 rtx_insn *call_fusage = 0;
1842 result = convert_memory_address (Pmode, result);
1844 apply_result_size ();
1845 result = gen_rtx_MEM (BLKmode, result);
1847 if (targetm.have_untyped_return ())
1849 rtx vector = result_vector (0, result);
1850 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1851 emit_barrier ();
1852 return;
1855 /* Restore the return value and note that each value is used. */
1856 size = 0;
1857 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1858 if ((mode = apply_result_mode[regno]) != VOIDmode)
1860 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1861 if (size % align != 0)
1862 size = CEIL (size, align) * align;
1863 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1864 emit_move_insn (reg, adjust_address (result, mode, size));
1866 push_to_sequence (call_fusage);
1867 emit_use (reg);
1868 call_fusage = get_insns ();
1869 end_sequence ();
1870 size += GET_MODE_SIZE (mode);
1873 /* Put the USE insns before the return. */
1874 emit_insn (call_fusage);
1876 /* Return whatever values were restored by jumping directly to the end
1877 of the function. */
1878 expand_naked_return ();
1881 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1883 static enum type_class
1884 type_to_class (tree type)
1886 switch (TREE_CODE (type))
1888 case VOID_TYPE: return void_type_class;
1889 case INTEGER_TYPE: return integer_type_class;
1890 case ENUMERAL_TYPE: return enumeral_type_class;
1891 case BOOLEAN_TYPE: return boolean_type_class;
1892 case POINTER_TYPE: return pointer_type_class;
1893 case REFERENCE_TYPE: return reference_type_class;
1894 case OFFSET_TYPE: return offset_type_class;
1895 case REAL_TYPE: return real_type_class;
1896 case COMPLEX_TYPE: return complex_type_class;
1897 case FUNCTION_TYPE: return function_type_class;
1898 case METHOD_TYPE: return method_type_class;
1899 case RECORD_TYPE: return record_type_class;
1900 case UNION_TYPE:
1901 case QUAL_UNION_TYPE: return union_type_class;
1902 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1903 ? string_type_class : array_type_class);
1904 case LANG_TYPE: return lang_type_class;
1905 default: return no_type_class;
1909 /* Expand a call EXP to __builtin_classify_type. */
1911 static rtx
1912 expand_builtin_classify_type (tree exp)
1914 if (call_expr_nargs (exp))
1915 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1916 return GEN_INT (no_type_class);
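/* Illustrative sketch (not part of the original file): given the
   mapping in type_to_class above, a call such as

     int c = __builtin_classify_type (3.14);

   folds to the constant real_type_class, while an int argument would
   yield integer_type_class.  */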
1919 /* This helper macro, meant to be used in mathfn_built_in below, determines
1920 which among a set of builtin math functions is appropriate for a given type
1921 mode. The `F' (float) and `L' (long double) are automatically generated
1922 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1923 types, there are additional types that are considered with 'F32', 'F64',
1924 'F128', etc. suffixes. */
1925 #define CASE_MATHFN(MATHFN) \
1926 CASE_CFN_##MATHFN: \
1927 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1928 fcodel = BUILT_IN_##MATHFN##L ; break;
1929 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1930 types. */
1931 #define CASE_MATHFN_FLOATN(MATHFN) \
1932 CASE_CFN_##MATHFN: \
1933 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1934 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1935 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1936 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1937 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1938 break;
1939 /* Similar to above, but appends _R after any F/L suffix. */
1940 #define CASE_MATHFN_REENT(MATHFN) \
1941 case CFN_BUILT_IN_##MATHFN##_R: \
1942 case CFN_BUILT_IN_##MATHFN##F_R: \
1943 case CFN_BUILT_IN_##MATHFN##L_R: \
1944 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1945 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1947 /* Return a function equivalent to FN but operating on floating-point
1948 values of type TYPE, or END_BUILTINS if no such function exists.
1949 This is purely an operation on function codes; it does not guarantee
1950 that the target actually has an implementation of the function. */
1952 static built_in_function
1953 mathfn_built_in_2 (tree type, combined_fn fn)
1955 tree mtype;
1956 built_in_function fcode, fcodef, fcodel;
1957 built_in_function fcodef16 = END_BUILTINS;
1958 built_in_function fcodef32 = END_BUILTINS;
1959 built_in_function fcodef64 = END_BUILTINS;
1960 built_in_function fcodef128 = END_BUILTINS;
1961 built_in_function fcodef32x = END_BUILTINS;
1962 built_in_function fcodef64x = END_BUILTINS;
1963 built_in_function fcodef128x = END_BUILTINS;
1965 switch (fn)
1967 CASE_MATHFN (ACOS)
1968 CASE_MATHFN (ACOSH)
1969 CASE_MATHFN (ASIN)
1970 CASE_MATHFN (ASINH)
1971 CASE_MATHFN (ATAN)
1972 CASE_MATHFN (ATAN2)
1973 CASE_MATHFN (ATANH)
1974 CASE_MATHFN (CBRT)
1975 CASE_MATHFN_FLOATN (CEIL)
1976 CASE_MATHFN (CEXPI)
1977 CASE_MATHFN_FLOATN (COPYSIGN)
1978 CASE_MATHFN (COS)
1979 CASE_MATHFN (COSH)
1980 CASE_MATHFN (DREM)
1981 CASE_MATHFN (ERF)
1982 CASE_MATHFN (ERFC)
1983 CASE_MATHFN (EXP)
1984 CASE_MATHFN (EXP10)
1985 CASE_MATHFN (EXP2)
1986 CASE_MATHFN (EXPM1)
1987 CASE_MATHFN (FABS)
1988 CASE_MATHFN (FDIM)
1989 CASE_MATHFN_FLOATN (FLOOR)
1990 CASE_MATHFN_FLOATN (FMA)
1991 CASE_MATHFN_FLOATN (FMAX)
1992 CASE_MATHFN_FLOATN (FMIN)
1993 CASE_MATHFN (FMOD)
1994 CASE_MATHFN (FREXP)
1995 CASE_MATHFN (GAMMA)
1996 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1997 CASE_MATHFN (HUGE_VAL)
1998 CASE_MATHFN (HYPOT)
1999 CASE_MATHFN (ILOGB)
2000 CASE_MATHFN (ICEIL)
2001 CASE_MATHFN (IFLOOR)
2002 CASE_MATHFN (INF)
2003 CASE_MATHFN (IRINT)
2004 CASE_MATHFN (IROUND)
2005 CASE_MATHFN (ISINF)
2006 CASE_MATHFN (J0)
2007 CASE_MATHFN (J1)
2008 CASE_MATHFN (JN)
2009 CASE_MATHFN (LCEIL)
2010 CASE_MATHFN (LDEXP)
2011 CASE_MATHFN (LFLOOR)
2012 CASE_MATHFN (LGAMMA)
2013 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2014 CASE_MATHFN (LLCEIL)
2015 CASE_MATHFN (LLFLOOR)
2016 CASE_MATHFN (LLRINT)
2017 CASE_MATHFN (LLROUND)
2018 CASE_MATHFN (LOG)
2019 CASE_MATHFN (LOG10)
2020 CASE_MATHFN (LOG1P)
2021 CASE_MATHFN (LOG2)
2022 CASE_MATHFN (LOGB)
2023 CASE_MATHFN (LRINT)
2024 CASE_MATHFN (LROUND)
2025 CASE_MATHFN (MODF)
2026 CASE_MATHFN (NAN)
2027 CASE_MATHFN (NANS)
2028 CASE_MATHFN_FLOATN (NEARBYINT)
2029 CASE_MATHFN (NEXTAFTER)
2030 CASE_MATHFN (NEXTTOWARD)
2031 CASE_MATHFN (POW)
2032 CASE_MATHFN (POWI)
2033 CASE_MATHFN (POW10)
2034 CASE_MATHFN (REMAINDER)
2035 CASE_MATHFN (REMQUO)
2036 CASE_MATHFN_FLOATN (RINT)
2037 CASE_MATHFN_FLOATN (ROUND)
2038 CASE_MATHFN (SCALB)
2039 CASE_MATHFN (SCALBLN)
2040 CASE_MATHFN (SCALBN)
2041 CASE_MATHFN (SIGNBIT)
2042 CASE_MATHFN (SIGNIFICAND)
2043 CASE_MATHFN (SIN)
2044 CASE_MATHFN (SINCOS)
2045 CASE_MATHFN (SINH)
2046 CASE_MATHFN_FLOATN (SQRT)
2047 CASE_MATHFN (TAN)
2048 CASE_MATHFN (TANH)
2049 CASE_MATHFN (TGAMMA)
2050 CASE_MATHFN_FLOATN (TRUNC)
2051 CASE_MATHFN (Y0)
2052 CASE_MATHFN (Y1)
2053 CASE_MATHFN (YN)
2055 default:
2056 return END_BUILTINS;
2059 mtype = TYPE_MAIN_VARIANT (type);
2060 if (mtype == double_type_node)
2061 return fcode;
2062 else if (mtype == float_type_node)
2063 return fcodef;
2064 else if (mtype == long_double_type_node)
2065 return fcodel;
2066 else if (mtype == float16_type_node)
2067 return fcodef16;
2068 else if (mtype == float32_type_node)
2069 return fcodef32;
2070 else if (mtype == float64_type_node)
2071 return fcodef64;
2072 else if (mtype == float128_type_node)
2073 return fcodef128;
2074 else if (mtype == float32x_type_node)
2075 return fcodef32x;
2076 else if (mtype == float64x_type_node)
2077 return fcodef64x;
2078 else if (mtype == float128x_type_node)
2079 return fcodef128x;
2080 else
2081 return END_BUILTINS;
2084 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2085 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2086 otherwise use the explicit declaration. If we can't do the conversion,
2087 return null. */
2089 static tree
2090 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2092 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2093 if (fcode2 == END_BUILTINS)
2094 return NULL_TREE;
2096 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2097 return NULL_TREE;
2099 return builtin_decl_explicit (fcode2);
2102 /* Like mathfn_built_in_1, but always use the implicit array. */
2104 tree
2105 mathfn_built_in (tree type, combined_fn fn)
2107 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2110 /* Like mathfn_built_in_1, but take a built_in_function and
2111 always use the implicit array. */
2113 tree
2114 mathfn_built_in (tree type, enum built_in_function fn)
2116 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
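/* Illustrative sketch (not part of the original file): with the table
   in mathfn_built_in_2 above, a query such as

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   is expected to yield the decl for sqrtf, since float_type_node
   selects the F-suffixed variant.  */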
2119 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2120 return its code, otherwise return IFN_LAST. Note that this function
2121 only tests whether the function is defined in internal-fn.def, not whether
2122 it is actually available on the target. */
2124 internal_fn
2125 associated_internal_fn (tree fndecl)
2127 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2128 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2129 switch (DECL_FUNCTION_CODE (fndecl))
2131 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2132 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2133 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2134 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2135 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2136 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2137 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2138 #include "internal-fn.def"
2140 CASE_FLT_FN (BUILT_IN_POW10):
2141 return IFN_EXP10;
2143 CASE_FLT_FN (BUILT_IN_DREM):
2144 return IFN_REMAINDER;
2146 CASE_FLT_FN (BUILT_IN_SCALBN):
2147 CASE_FLT_FN (BUILT_IN_SCALBLN):
2148 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2149 return IFN_LDEXP;
2150 return IFN_LAST;
2152 default:
2153 return IFN_LAST;
2157 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2158 on the current target by a call to an internal function, return the
2159 code of that internal function, otherwise return IFN_LAST. The caller
2160 is responsible for ensuring that any side-effects of the built-in
2161 call are dealt with correctly. E.g. if CALL sets errno, the caller
2162 must decide that the errno result isn't needed or make it available
2163 in some other way. */
2165 internal_fn
2166 replacement_internal_fn (gcall *call)
2168 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2170 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2171 if (ifn != IFN_LAST)
2173 tree_pair types = direct_internal_fn_types (ifn, call);
2174 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2175 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2176 return ifn;
2179 return IFN_LAST;
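/* Illustrative sketch (not part of the original file): for a GIMPLE
   call such as

     x = __builtin_sqrt (y);   // errno result known to be unused

   replacement_internal_fn returns IFN_SQRT when the target supports
   the corresponding internal function for y's mode, and IFN_LAST
   otherwise.  */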
2182 /* Expand a call to the builtin trinary math functions (fma).
2183 Return NULL_RTX if a normal call should be emitted rather than expanding the
2184 function in-line. EXP is the expression that is a call to the builtin
2185 function; if convenient, the result should be placed in TARGET.
2186 SUBTARGET may be used as the target for computing one of EXP's
2187 operands. */
2189 static rtx
2190 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2192 optab builtin_optab;
2193 rtx op0, op1, op2, result;
2194 rtx_insn *insns;
2195 tree fndecl = get_callee_fndecl (exp);
2196 tree arg0, arg1, arg2;
2197 machine_mode mode;
2199 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2200 return NULL_RTX;
2202 arg0 = CALL_EXPR_ARG (exp, 0);
2203 arg1 = CALL_EXPR_ARG (exp, 1);
2204 arg2 = CALL_EXPR_ARG (exp, 2);
2206 switch (DECL_FUNCTION_CODE (fndecl))
2208 CASE_FLT_FN (BUILT_IN_FMA):
2209 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2210 builtin_optab = fma_optab; break;
2211 default:
2212 gcc_unreachable ();
2215 /* Make a suitable register to place result in. */
2216 mode = TYPE_MODE (TREE_TYPE (exp));
2218 /* Before working hard, check whether the instruction is available. */
2219 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2220 return NULL_RTX;
2222 result = gen_reg_rtx (mode);
2224 /* Always stabilize the argument list. */
2225 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2226 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2227 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2229 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2230 op1 = expand_normal (arg1);
2231 op2 = expand_normal (arg2);
2233 start_sequence ();
2235 /* Compute into RESULT.
2236 Set RESULT to wherever the result comes back. */
2237 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2238 result, 0);
2240 /* If we were unable to expand via the builtin, stop the sequence
2241 (without outputting the insns) and call to the library function
2242 with the stabilized argument list. */
2243 if (result == 0)
2245 end_sequence ();
2246 return expand_call (exp, target, target == const0_rtx);
2249 /* Output the entire sequence. */
2250 insns = get_insns ();
2251 end_sequence ();
2252 emit_insn (insns);
2254 return result;
2257 /* Expand a call to the builtin sin and cos math functions.
2258 Return NULL_RTX if a normal call should be emitted rather than expanding the
2259 function in-line. EXP is the expression that is a call to the builtin
2260 function; if convenient, the result should be placed in TARGET.
2261 SUBTARGET may be used as the target for computing one of EXP's
2262 operands. */
2264 static rtx
2265 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2267 optab builtin_optab;
2268 rtx op0;
2269 rtx_insn *insns;
2270 tree fndecl = get_callee_fndecl (exp);
2271 machine_mode mode;
2272 tree arg;
2274 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2275 return NULL_RTX;
2277 arg = CALL_EXPR_ARG (exp, 0);
2279 switch (DECL_FUNCTION_CODE (fndecl))
2281 CASE_FLT_FN (BUILT_IN_SIN):
2282 CASE_FLT_FN (BUILT_IN_COS):
2283 builtin_optab = sincos_optab; break;
2284 default:
2285 gcc_unreachable ();
2288 /* Make a suitable register to place result in. */
2289 mode = TYPE_MODE (TREE_TYPE (exp));
2291 /* Check if the sincos insn is available; otherwise fall back
2292 to the sin or cos insn. */
2293 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2294 switch (DECL_FUNCTION_CODE (fndecl))
2296 CASE_FLT_FN (BUILT_IN_SIN):
2297 builtin_optab = sin_optab; break;
2298 CASE_FLT_FN (BUILT_IN_COS):
2299 builtin_optab = cos_optab; break;
2300 default:
2301 gcc_unreachable ();
2304 /* Before working hard, check whether the instruction is available. */
2305 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2307 rtx result = gen_reg_rtx (mode);
2309 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2310 need to expand the argument again. This way, we will not perform
2311 side-effects more than once. */
2312 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2314 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2316 start_sequence ();
2318 /* Compute into RESULT.
2319 Set RESULT to wherever the result comes back. */
2320 if (builtin_optab == sincos_optab)
2322 int ok;
2324 switch (DECL_FUNCTION_CODE (fndecl))
2326 CASE_FLT_FN (BUILT_IN_SIN):
2327 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2328 break;
2329 CASE_FLT_FN (BUILT_IN_COS):
2330 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2331 break;
2332 default:
2333 gcc_unreachable ();
2335 gcc_assert (ok);
2337 else
2338 result = expand_unop (mode, builtin_optab, op0, result, 0);
2340 if (result != 0)
2342 /* Output the entire sequence. */
2343 insns = get_insns ();
2344 end_sequence ();
2345 emit_insn (insns);
2346 return result;
2349 /* If we were unable to expand via the builtin, stop the sequence
2350 (without outputting the insns) and call to the library function
2351 with the stabilized argument list. */
2352 end_sequence ();
2355 return expand_call (exp, target, target == const0_rtx);
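/* Illustrative note (not part of the original file): on a target whose
   sincos insn computes both values at once, a lone call

     double s = __builtin_sin (x);

   still goes through sincos_optab above, with expand_twoval_unop
   keeping only the sine output; the cosine result register is simply
   left unused.  */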
2358 /* Given an interclass math builtin decl FNDECL and its argument ARG
2359 return an RTL instruction code that implements the functionality.
2360 If that isn't possible or available return CODE_FOR_nothing. */
2362 static enum insn_code
2363 interclass_mathfn_icode (tree arg, tree fndecl)
2365 bool errno_set = false;
2366 optab builtin_optab = unknown_optab;
2367 machine_mode mode;
2369 switch (DECL_FUNCTION_CODE (fndecl))
2371 CASE_FLT_FN (BUILT_IN_ILOGB):
2372 errno_set = true; builtin_optab = ilogb_optab; break;
2373 CASE_FLT_FN (BUILT_IN_ISINF):
2374 builtin_optab = isinf_optab; break;
2375 case BUILT_IN_ISNORMAL:
2376 case BUILT_IN_ISFINITE:
2377 CASE_FLT_FN (BUILT_IN_FINITE):
2378 case BUILT_IN_FINITED32:
2379 case BUILT_IN_FINITED64:
2380 case BUILT_IN_FINITED128:
2381 case BUILT_IN_ISINFD32:
2382 case BUILT_IN_ISINFD64:
2383 case BUILT_IN_ISINFD128:
2384 /* These builtins have no optabs (yet). */
2385 break;
2386 default:
2387 gcc_unreachable ();
2390 /* There's no easy way to detect the case we need to set EDOM. */
2391 if (flag_errno_math && errno_set)
2392 return CODE_FOR_nothing;
2394 /* Optab mode depends on the mode of the input argument. */
2395 mode = TYPE_MODE (TREE_TYPE (arg));
2397 if (builtin_optab)
2398 return optab_handler (builtin_optab, mode);
2399 return CODE_FOR_nothing;
2402 /* Expand a call to one of the builtin math functions that operate on
2403 floating point argument and output an integer result (ilogb, isinf,
2404 isnan, etc).
2405 Return 0 if a normal call should be emitted rather than expanding the
2406 function in-line. EXP is the expression that is a call to the builtin
2407 function; if convenient, the result should be placed in TARGET. */
2409 static rtx
2410 expand_builtin_interclass_mathfn (tree exp, rtx target)
2412 enum insn_code icode = CODE_FOR_nothing;
2413 rtx op0;
2414 tree fndecl = get_callee_fndecl (exp);
2415 machine_mode mode;
2416 tree arg;
2418 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2419 return NULL_RTX;
2421 arg = CALL_EXPR_ARG (exp, 0);
2422 icode = interclass_mathfn_icode (arg, fndecl);
2423 mode = TYPE_MODE (TREE_TYPE (arg));
2425 if (icode != CODE_FOR_nothing)
2427 struct expand_operand ops[1];
2428 rtx_insn *last = get_last_insn ();
2429 tree orig_arg = arg;
2431 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2432 need to expand the argument again. This way, we will not perform
2433 side-effects more than once. */
2434 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2436 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2438 if (mode != GET_MODE (op0))
2439 op0 = convert_to_mode (mode, op0, 0);
2441 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2442 if (maybe_legitimize_operands (icode, 0, 1, ops)
2443 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2444 return ops[0].value;
2446 delete_insns_since (last);
2447 CALL_EXPR_ARG (exp, 0) = orig_arg;
2450 return NULL_RTX;
2453 /* Expand a call to the builtin sincos math function.
2454 Return NULL_RTX if a normal call should be emitted rather than expanding the
2455 function in-line. EXP is the expression that is a call to the builtin
2456 function. */
2458 static rtx
2459 expand_builtin_sincos (tree exp)
2461 rtx op0, op1, op2, target1, target2;
2462 machine_mode mode;
2463 tree arg, sinp, cosp;
2464 int result;
2465 location_t loc = EXPR_LOCATION (exp);
2466 tree alias_type, alias_off;
2468 if (!validate_arglist (exp, REAL_TYPE,
2469 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2470 return NULL_RTX;
2472 arg = CALL_EXPR_ARG (exp, 0);
2473 sinp = CALL_EXPR_ARG (exp, 1);
2474 cosp = CALL_EXPR_ARG (exp, 2);
2476 /* Make a suitable register to place result in. */
2477 mode = TYPE_MODE (TREE_TYPE (arg));
2479 /* Check if sincos insn is available, otherwise emit the call. */
2480 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2481 return NULL_RTX;
2483 target1 = gen_reg_rtx (mode);
2484 target2 = gen_reg_rtx (mode);
2486 op0 = expand_normal (arg);
2487 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2488 alias_off = build_int_cst (alias_type, 0);
2489 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2490 sinp, alias_off));
2491 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2492 cosp, alias_off));
2494 /* Compute into target1 and target2.
2495 Set TARGET to wherever the result comes back. */
2496 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2497 gcc_assert (result);
2499 /* Move target1 and target2 to the memory locations indicated
2500 by op1 and op2. */
2501 emit_move_insn (op1, target1);
2502 emit_move_insn (op2, target2);
2504 return const0_rtx;
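/* Illustrative sketch (not part of the original file): when the
   target's sincos optab exists, a call

     sincos (x, &s, &c);

   expands to a single two-output sincos insn, with the results stored
   through the two pointer arguments as done above.  */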
2507 /* Expand a call to the internal cexpi builtin to the sincos math function.
2508 EXP is the expression that is a call to the builtin function; if convenient,
2509 the result should be placed in TARGET. */
2511 static rtx
2512 expand_builtin_cexpi (tree exp, rtx target)
2514 tree fndecl = get_callee_fndecl (exp);
2515 tree arg, type;
2516 machine_mode mode;
2517 rtx op0, op1, op2;
2518 location_t loc = EXPR_LOCATION (exp);
2520 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2521 return NULL_RTX;
2523 arg = CALL_EXPR_ARG (exp, 0);
2524 type = TREE_TYPE (arg);
2525 mode = TYPE_MODE (TREE_TYPE (arg));
2527 /* Try expanding via a sincos optab, fall back to emitting a libcall
2528 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2529 is only generated from sincos, cexp or if we have either of them. */
2530 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2532 op1 = gen_reg_rtx (mode);
2533 op2 = gen_reg_rtx (mode);
2535 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2537 /* Compute into op1 and op2. */
2538 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2540 else if (targetm.libc_has_function (function_sincos))
2542 tree call, fn = NULL_TREE;
2543 tree top1, top2;
2544 rtx op1a, op2a;
2546 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2547 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2548 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2549 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2550 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2551 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2552 else
2553 gcc_unreachable ();
2555 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2556 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2557 op1a = copy_addr_to_reg (XEXP (op1, 0));
2558 op2a = copy_addr_to_reg (XEXP (op2, 0));
2559 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2560 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2562 /* Make sure not to fold the sincos call again. */
2563 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2564 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2565 call, 3, arg, top1, top2));
2567 else
2569 tree call, fn = NULL_TREE, narg;
2570 tree ctype = build_complex_type (type);
2572 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2573 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2574 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2575 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2576 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2577 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2578 else
2579 gcc_unreachable ();
2581 /* If we don't have a decl for cexp, create one. This is the
2582 friendliest fallback if the user calls __builtin_cexpi
2583 on a target without full C99 function support. */
2584 if (fn == NULL_TREE)
2586 tree fntype;
2587 const char *name = NULL;
2589 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2590 name = "cexpf";
2591 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2592 name = "cexp";
2593 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2594 name = "cexpl";
2596 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2597 fn = build_fn_decl (name, fntype);
2600 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2601 build_real (type, dconst0), arg);
2603 /* Make sure not to fold the cexp call again. */
2604 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2605 return expand_expr (build_call_nary (ctype, call, 1, narg),
2606 target, VOIDmode, EXPAND_NORMAL);
2609 /* Now build the proper return type. */
2610 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2611 make_tree (TREE_TYPE (arg), op2),
2612 make_tree (TREE_TYPE (arg), op1)),
2613 target, VOIDmode, EXPAND_NORMAL);
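/* Illustrative note (not part of the original file): cexpi (x)
   computes cos (x) + i*sin (x), i.e. cexp (I*x).  The three strategies
   above therefore agree: a sincos insn or sincos libcall yields the
   two parts directly, while the cexp fallback passes the complex
   value 0 + x*I.  */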
2616 /* Conveniently construct a function call expression. FNDECL names the
2617 function to be called, N is the number of arguments, and the "..."
2618 parameters are the argument expressions. Unlike build_call_expr
2619 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2621 static tree
2622 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2624 va_list ap;
2625 tree fntype = TREE_TYPE (fndecl);
2626 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2628 va_start (ap, n);
2629 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2630 va_end (ap);
2631 SET_EXPR_LOCATION (fn, loc);
2632 return fn;
2635 /* Expand a call to one of the builtin rounding functions gcc defines
2636 as an extension (lfloor and lceil). As these are gcc extensions we
2637 do not need to worry about setting errno to EDOM.
2638 If expanding via optab fails, lower expression to (int)(floor(x)).
2639 EXP is the expression that is a call to the builtin function;
2640 if convenient, the result should be placed in TARGET. */
2642 static rtx
2643 expand_builtin_int_roundingfn (tree exp, rtx target)
2645 convert_optab builtin_optab;
2646 rtx op0, tmp;
2647 rtx_insn *insns;
2648 tree fndecl = get_callee_fndecl (exp);
2649 enum built_in_function fallback_fn;
2650 tree fallback_fndecl;
2651 machine_mode mode;
2652 tree arg;
2654 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2655 gcc_unreachable ();
2657 arg = CALL_EXPR_ARG (exp, 0);
2659 switch (DECL_FUNCTION_CODE (fndecl))
2661 CASE_FLT_FN (BUILT_IN_ICEIL):
2662 CASE_FLT_FN (BUILT_IN_LCEIL):
2663 CASE_FLT_FN (BUILT_IN_LLCEIL):
2664 builtin_optab = lceil_optab;
2665 fallback_fn = BUILT_IN_CEIL;
2666 break;
2668 CASE_FLT_FN (BUILT_IN_IFLOOR):
2669 CASE_FLT_FN (BUILT_IN_LFLOOR):
2670 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2671 builtin_optab = lfloor_optab;
2672 fallback_fn = BUILT_IN_FLOOR;
2673 break;
2675 default:
2676 gcc_unreachable ();
2679 /* Make a suitable register to place result in. */
2680 mode = TYPE_MODE (TREE_TYPE (exp));
2682 target = gen_reg_rtx (mode);
2684 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2685 need to expand the argument again. This way, we will not perform
2686 side-effects more than once. */
2687 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2689 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2691 start_sequence ();
2693 /* Compute into TARGET. */
2694 if (expand_sfix_optab (target, op0, builtin_optab))
2696 /* Output the entire sequence. */
2697 insns = get_insns ();
2698 end_sequence ();
2699 emit_insn (insns);
2700 return target;
2703 /* If we were unable to expand via the builtin, stop the sequence
2704 (without outputting the insns). */
2705 end_sequence ();
2707 /* Fall back to floating point rounding optab. */
2708 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2710 /* For non-C99 targets we may end up without a fallback fndecl here
2711 if the user called __builtin_lfloor directly. In this case emit
2712 a call to the floor/ceil variants nevertheless. This should result
2713 in the best user experience for targets lacking full C99 support. */
2714 if (fallback_fndecl == NULL_TREE)
2716 tree fntype;
2717 const char *name = NULL;
2719 switch (DECL_FUNCTION_CODE (fndecl))
2721 case BUILT_IN_ICEIL:
2722 case BUILT_IN_LCEIL:
2723 case BUILT_IN_LLCEIL:
2724 name = "ceil";
2725 break;
2726 case BUILT_IN_ICEILF:
2727 case BUILT_IN_LCEILF:
2728 case BUILT_IN_LLCEILF:
2729 name = "ceilf";
2730 break;
2731 case BUILT_IN_ICEILL:
2732 case BUILT_IN_LCEILL:
2733 case BUILT_IN_LLCEILL:
2734 name = "ceill";
2735 break;
2736 case BUILT_IN_IFLOOR:
2737 case BUILT_IN_LFLOOR:
2738 case BUILT_IN_LLFLOOR:
2739 name = "floor";
2740 break;
2741 case BUILT_IN_IFLOORF:
2742 case BUILT_IN_LFLOORF:
2743 case BUILT_IN_LLFLOORF:
2744 name = "floorf";
2745 break;
2746 case BUILT_IN_IFLOORL:
2747 case BUILT_IN_LFLOORL:
2748 case BUILT_IN_LLFLOORL:
2749 name = "floorl";
2750 break;
2751 default:
2752 gcc_unreachable ();
2755 fntype = build_function_type_list (TREE_TYPE (arg),
2756 TREE_TYPE (arg), NULL_TREE);
2757 fallback_fndecl = build_fn_decl (name, fntype);
2760 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2762 tmp = expand_normal (exp);
2763 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2765 /* Truncate the result of floating point optab to integer
2766 via expand_fix (). */
2767 target = gen_reg_rtx (mode);
2768 expand_fix (target, tmp, 0);
2770 return target;
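/* Illustrative sketch (not part of the original file): with the
   fallback path above, on a target without an lfloor pattern

     long l = __builtin_lfloor (x);

   is emitted as a call to floor followed by a float-to-integer
   conversion via expand_fix, i.e. roughly (long) floor (x).  */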
2773 /* Expand a call to one of the builtin math functions doing integer
2774 conversion (lrint).
2775 Return 0 if a normal call should be emitted rather than expanding the
2776 function in-line. EXP is the expression that is a call to the builtin
2777 function; if convenient, the result should be placed in TARGET. */
2779 static rtx
2780 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2782 convert_optab builtin_optab;
2783 rtx op0;
2784 rtx_insn *insns;
2785 tree fndecl = get_callee_fndecl (exp);
2786 tree arg;
2787 machine_mode mode;
2788 enum built_in_function fallback_fn = BUILT_IN_NONE;
2790 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2791 gcc_unreachable ();
2793 arg = CALL_EXPR_ARG (exp, 0);
2795 switch (DECL_FUNCTION_CODE (fndecl))
2797 CASE_FLT_FN (BUILT_IN_IRINT):
2798 fallback_fn = BUILT_IN_LRINT;
2799 gcc_fallthrough ();
2800 CASE_FLT_FN (BUILT_IN_LRINT):
2801 CASE_FLT_FN (BUILT_IN_LLRINT):
2802 builtin_optab = lrint_optab;
2803 break;
2805 CASE_FLT_FN (BUILT_IN_IROUND):
2806 fallback_fn = BUILT_IN_LROUND;
2807 gcc_fallthrough ();
2808 CASE_FLT_FN (BUILT_IN_LROUND):
2809 CASE_FLT_FN (BUILT_IN_LLROUND):
2810 builtin_optab = lround_optab;
2811 break;
2813 default:
2814 gcc_unreachable ();
2817 /* There's no easy way to detect the case we need to set EDOM. */
2818 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2819 return NULL_RTX;
2821 /* Make a suitable register to place result in. */
2822 mode = TYPE_MODE (TREE_TYPE (exp));
2824 /* There's no easy way to detect the case we need to set EDOM. */
2825 if (!flag_errno_math)
2827 rtx result = gen_reg_rtx (mode);
2829 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2830 need to expand the argument again. This way, we will not perform
2831 side-effects more than once. */
2832 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2834 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2836 start_sequence ();
2838 if (expand_sfix_optab (result, op0, builtin_optab))
2840 /* Output the entire sequence. */
2841 insns = get_insns ();
2842 end_sequence ();
2843 emit_insn (insns);
2844 return result;
2847 /* If we were unable to expand via the builtin, stop the sequence
2848 (without outputting the insns) and call to the library function
2849 with the stabilized argument list. */
2850 end_sequence ();
2853 if (fallback_fn != BUILT_IN_NONE)
2855 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2856 targets, (int) round (x) should never be transformed into
2857 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2858 a call to lround in the hope that the target provides at least some
2859 C99 functions. This should result in the best user experience for
2860 targets lacking full C99 support. */
2861 tree fallback_fndecl = mathfn_built_in_1
2862 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2864 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2865 fallback_fndecl, 1, arg);
2867 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2868 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2869 return convert_to_mode (mode, target, 0);
2872 return expand_call (exp, target, target == const0_rtx);
2875 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2876 a normal call should be emitted rather than expanding the function
2877 in-line. EXP is the expression that is a call to the builtin
2878 function; if convenient, the result should be placed in TARGET. */
2880 static rtx
2881 expand_builtin_powi (tree exp, rtx target)
2883 tree arg0, arg1;
2884 rtx op0, op1;
2885 machine_mode mode;
2886 machine_mode mode2;
2888 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2889 return NULL_RTX;
2891 arg0 = CALL_EXPR_ARG (exp, 0);
2892 arg1 = CALL_EXPR_ARG (exp, 1);
2893 mode = TYPE_MODE (TREE_TYPE (exp));
2895 /* Emit a libcall to libgcc. */
2897 /* Mode of the 2nd argument must match that of an int. */
2898 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2900 if (target == NULL_RTX)
2901 target = gen_reg_rtx (mode);
2903 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2904 if (GET_MODE (op0) != mode)
2905 op0 = convert_to_mode (mode, op0, 0);
2906 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2907 if (GET_MODE (op1) != mode2)
2908 op1 = convert_to_mode (mode2, op1, 0);
2910 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2911 target, LCT_CONST, mode,
2912 op0, mode, op1, mode2);
2914 return target;
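/* Illustrative note (not part of the original file): the powi libcall
   above resolves to the libgcc helpers (__powisf2, __powidf2, ...),
   so a call such as

     double p = __builtin_powi (x, 5);

   becomes a libcall taking the base in MODE and the exponent as an
   int.  */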
2917 /* Expand expression EXP which is a call to the strlen builtin. Return
2918 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2919 try to get the result in TARGET, if convenient. */
2921 static rtx
2922 expand_builtin_strlen (tree exp, rtx target,
2923 machine_mode target_mode)
2925 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2926 return NULL_RTX;
2928 struct expand_operand ops[4];
2929 rtx pat;
2930 tree len;
2931 tree src = CALL_EXPR_ARG (exp, 0);
2932 rtx src_reg;
2933 rtx_insn *before_strlen;
2934 machine_mode insn_mode;
2935 enum insn_code icode = CODE_FOR_nothing;
2936 unsigned int align;
2938 /* If the length can be computed at compile-time, return it. */
2939 len = c_strlen (src, 0);
2940 if (len)
2941 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2943 /* If the length can be computed at compile-time and is constant
2944 integer, but there are side-effects in src, evaluate
2945 src for side-effects, then return len.
2946 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2947 can be optimized into: i++; x = 3; */
2948 len = c_strlen (src, 1);
2949 if (len && TREE_CODE (len) == INTEGER_CST)
2951 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2952 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2955 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2957 /* If SRC is not a pointer type, don't do this operation inline. */
2958 if (align == 0)
2959 return NULL_RTX;
2961 /* Bail out if we can't compute strlen in the right mode. */
2962 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2964 icode = optab_handler (strlen_optab, insn_mode);
2965 if (icode != CODE_FOR_nothing)
2966 break;
2968 if (insn_mode == VOIDmode)
2969 return NULL_RTX;
2971 /* Make a place to hold the source address. We will not expand
2972 the actual source until we are sure that the expansion will
2973 not fail -- there are trees that cannot be expanded twice. */
2974 src_reg = gen_reg_rtx (Pmode);
2976 /* Mark the beginning of the strlen sequence so we can emit the
2977 source operand later. */
2978 before_strlen = get_last_insn ();
2980 create_output_operand (&ops[0], target, insn_mode);
2981 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2982 create_integer_operand (&ops[2], 0);
2983 create_integer_operand (&ops[3], align);
2984 if (!maybe_expand_insn (icode, 4, ops))
2985 return NULL_RTX;
2987 /* Check to see if the argument was declared attribute nonstring
2988 and if so, issue a warning since at this point it's not known
2989 to be nul-terminated. */
2990 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2992 /* Now that we are assured of success, expand the source. */
2993 start_sequence ();
2994 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2995 if (pat != src_reg)
2997 #ifdef POINTERS_EXTEND_UNSIGNED
2998 if (GET_MODE (pat) != Pmode)
2999 pat = convert_to_mode (Pmode, pat,
3000 POINTERS_EXTEND_UNSIGNED);
3001 #endif
3002 emit_move_insn (src_reg, pat);
3004 pat = get_insns ();
3005 end_sequence ();
3007 if (before_strlen)
3008 emit_insn_after (pat, before_strlen);
3009 else
3010 emit_insn_before (pat, get_insns ());
3012 /* Return the value in the proper mode for this function. */
3013 if (GET_MODE (ops[0].value) == target_mode)
3014 target = ops[0].value;
3015 else if (target != 0)
3016 convert_move (target, ops[0].value, 0);
3017 else
3018 target = convert_to_mode (target_mode, ops[0].value, 0);
3020 return target;
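/* Illustrative sketch (not part of the original file): the two
   c_strlen queries above mean that

     size_t n = strlen ("abc");

   folds to 3 at compile time, and that a side-effecting argument is
   still evaluated before the constant length is substituted.  */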
3023 /* Expand call EXP to the strnlen built-in, returning the result
3024 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3026 static rtx
3027 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3029 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3030 return NULL_RTX;
3032 tree src = CALL_EXPR_ARG (exp, 0);
3033 tree bound = CALL_EXPR_ARG (exp, 1);
3035 if (!bound)
3036 return NULL_RTX;
3038 location_t loc = UNKNOWN_LOCATION;
3039 if (EXPR_HAS_LOCATION (exp))
3040 loc = EXPR_LOCATION (exp);
3042 tree maxobjsize = max_object_size ();
3043 tree func = get_callee_fndecl (exp);
3045 tree len = c_strlen (src, 0);
3046 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3047 so these conversions aren't necessary. */
3048 if (len)
3049 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3051 if (TREE_CODE (bound) == INTEGER_CST)
3053 if (!TREE_NO_WARNING (exp)
3054 && tree_int_cst_lt (maxobjsize, bound)
3055 && warning_at (loc, OPT_Wstringop_overflow_,
3056 "%K%qD specified bound %E "
3057 "exceeds maximum object size %E",
3058 exp, func, bound, maxobjsize))
3059 TREE_NO_WARNING (exp) = true;
3061 if (!len || TREE_CODE (len) != INTEGER_CST)
3062 return NULL_RTX;
3064 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3065 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3068 if (TREE_CODE (bound) != SSA_NAME)
3069 return NULL_RTX;
3071 wide_int min, max;
3072 enum value_range_type rng = get_range_info (bound, &min, &max);
3073 if (rng != VR_RANGE)
3074 return NULL_RTX;
3076 if (!TREE_NO_WARNING (exp)
3077 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3078 && warning_at (loc, OPT_Wstringop_overflow_,
3079 "%K%qD specified bound [%wu, %wu] "
3080 "exceeds maximum object size %E",
3081 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3082 TREE_NO_WARNING (exp) = true;
3084 if (!len || TREE_CODE (len) != INTEGER_CST)
3085 return NULL_RTX;
3087 if (wi::gtu_p (min, wi::to_wide (len)))
3088 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3090 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3091 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
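/* Illustrative sketch (not part of the original file): for a constant
   string and bound, the MIN_EXPR above makes

     size_t n = strnlen ("hello", 3);

   fold to 3, the smaller of the bound and the string length 5.  */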
3094 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3095 bytes from constant string DATA + OFFSET and return it as target
3096 constant. */
3098 static rtx
3099 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3100 scalar_int_mode mode)
3102 const char *str = (const char *) data;
3104 gcc_assert (offset >= 0
3105 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3106 <= strlen (str) + 1));
3108 return c_readstr (str + offset, mode);
3111 /* LEN specifies the length of the block for a memcpy/memset operation.
3112 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3113 In some cases we can make a very likely guess about the max size, which
3114 we then store into PROBABLE_MAX_SIZE. */
3116 static void
3117 determine_block_size (tree len, rtx len_rtx,
3118 unsigned HOST_WIDE_INT *min_size,
3119 unsigned HOST_WIDE_INT *max_size,
3120 unsigned HOST_WIDE_INT *probable_max_size)
3122 if (CONST_INT_P (len_rtx))
3124 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3125 return;
3127 else
3129 wide_int min, max;
3130 enum value_range_type range_type = VR_UNDEFINED;
3132 /* Determine bounds from the type. */
3133 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3134 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3135 else
3136 *min_size = 0;
3137 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3138 *probable_max_size = *max_size
3139 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3140 else
3141 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3143 if (TREE_CODE (len) == SSA_NAME)
3144 range_type = get_range_info (len, &min, &max);
3145 if (range_type == VR_RANGE)
3147 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3148 *min_size = min.to_uhwi ();
3149 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3150 *probable_max_size = *max_size = max.to_uhwi ();
3152 else if (range_type == VR_ANTI_RANGE)
3154 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3155 if (min == 0)
3157 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3158 *min_size = max.to_uhwi () + 1;
3160 /* Code like
3162 int n;
3163 if (n < 100)
3164 memcpy (a, b, n)
3166 produces an anti-range allowing negative values of N. We can still
3167 use that information to guess that N is not negative. */
3169 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3170 *probable_max_size = min.to_uhwi () - 1;
3173 gcc_checking_assert (*max_size <=
3174 (unsigned HOST_WIDE_INT)
3175 GET_MODE_MASK (GET_MODE (len_rtx)));
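/* Illustrative sketch (not part of the original file): if LEN is an
   SSA name whose value-range info says it lies in [4, 16], the
   VR_RANGE branch above sets *min_size = 4 and *max_size
   = *probable_max_size = 16, tightening the bounds derived from
   LEN's type.  */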
3178 /* Try to verify that the sizes and lengths of the arguments to a string
3179 manipulation function given by EXP are within valid bounds and that
3180 the operation does not lead to buffer overflow or read past the end.
3181 Arguments other than EXP may be null. When non-null, the arguments
3182 have the following meaning:
3183 DST is the destination of a copy call or NULL otherwise.
3184 SRC is the source of a copy call or NULL otherwise.
3185 DSTWRITE is the number of bytes written into the destination obtained
3186 from the user-supplied size argument to the function (such as in
3187 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE).
3188 MAXREAD is the user-supplied bound on the length of the source sequence
3189 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3190 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3191 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3192 expression EXP is a string function call (as opposed to a memory call
3193 like memcpy). As an exception, SRCSTR can also be an integer denoting
3194 the precomputed size of the source string or object (for functions like
3195 memcpy).
3196 DSTSIZE is the size of the destination object specified by the last
3197 argument to the _chk builtins, typically resulting from the expansion
3198 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3199 DSTSIZE).
3201 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3202 SIZE_MAX.
3204 If the call is successfully verified as safe return true, otherwise
3205 return false. */
3207 static bool
3208 check_access (tree exp, tree, tree, tree dstwrite,
3209 tree maxread, tree srcstr, tree dstsize)
3211 int opt = OPT_Wstringop_overflow_;
3213 /* The size of the largest object is half the address space, or
3214 PTRDIFF_MAX. (This is way too permissive.) */
3215 tree maxobjsize = max_object_size ();
3217 /* Either the length of the source string for string functions or
3218 the size of the source object for raw memory functions. */
3219 tree slen = NULL_TREE;
3221 tree range[2] = { NULL_TREE, NULL_TREE };
3223 /* Set to true when the exact number of bytes written by a string
3224 function like strcpy is not known and the only thing that is
3225 known is that it must be at least one (for the terminating nul). */
3226 bool at_least_one = false;
3227 if (srcstr)
3229 /* SRCSTR is normally a pointer to string but as a special case
3230 it can be an integer denoting the length of a string. */
3231 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3233 /* Try to determine the range of lengths the source string
3234 refers to. If it can be determined and is less than
3235 the upper bound given by MAXREAD add one to it for
3236 the terminating nul. Otherwise, set it to one for
3237 the same reason, or to MAXREAD as appropriate. */
3238 get_range_strlen (srcstr, range);
3239 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3241 if (maxread && tree_int_cst_le (maxread, range[0]))
3242 range[0] = range[1] = maxread;
3243 else
3244 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3245 range[0], size_one_node);
3247 if (maxread && tree_int_cst_le (maxread, range[1]))
3248 range[1] = maxread;
3249 else if (!integer_all_onesp (range[1]))
3250 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3251 range[1], size_one_node);
3253 slen = range[0];
3255 else
3257 at_least_one = true;
3258 slen = size_one_node;
3261 else
3262 slen = srcstr;
3265 if (!dstwrite && !maxread)
3267 /* When the only available piece of data is the object size
3268 there is nothing to do. */
3269 if (!slen)
3270 return true;
3272 /* Otherwise, when the length of the source sequence is known
3273 (as with strlen), set DSTWRITE to it. */
3274 if (!range[0])
3275 dstwrite = slen;
3278 if (!dstsize)
3279 dstsize = maxobjsize;
3281 if (dstwrite)
3282 get_size_range (dstwrite, range);
3284 tree func = get_callee_fndecl (exp);
3286 /* First check the number of bytes to be written against the maximum
3287 object size. */
3288 if (range[0]
3289 && TREE_CODE (range[0]) == INTEGER_CST
3290 && tree_int_cst_lt (maxobjsize, range[0]))
3292 if (TREE_NO_WARNING (exp))
3293 return false;
3295 location_t loc = tree_nonartificial_location (exp);
3296 loc = expansion_point_location_if_in_system_header (loc);
3298 bool warned;
3299 if (range[0] == range[1])
3300 warned = warning_at (loc, opt,
3301 "%K%qD specified size %E "
3302 "exceeds maximum object size %E",
3303 exp, func, range[0], maxobjsize);
3304 else
3305 warned = warning_at (loc, opt,
3306 "%K%qD specified size between %E and %E "
3307 "exceeds maximum object size %E",
3308 exp, func,
3309 range[0], range[1], maxobjsize);
3310 if (warned)
3311 TREE_NO_WARNING (exp) = true;
3313 return false;
3316 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3317 constant, and in range of unsigned HOST_WIDE_INT. */
3318 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3320 /* Next check the number of bytes to be written against the destination
3321 object size. */
3322 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3324 if (range[0]
3325 && TREE_CODE (range[0]) == INTEGER_CST
3326 && ((tree_fits_uhwi_p (dstsize)
3327 && tree_int_cst_lt (dstsize, range[0]))
3328 || (dstwrite
3329 && tree_fits_uhwi_p (dstwrite)
3330 && tree_int_cst_lt (dstwrite, range[0]))))
3332 if (TREE_NO_WARNING (exp))
3333 return false;
3335 location_t loc = tree_nonartificial_location (exp);
3336 loc = expansion_point_location_if_in_system_header (loc);
3338 if (dstwrite == slen && at_least_one)
3340 /* This is a call to strcpy with a destination of 0 size
3341 and a source of unknown length. The call will write
3342 at least one byte past the end of the destination. */
3343 warning_at (loc, opt,
3344 "%K%qD writing %E or more bytes into a region "
3345 "of size %E overflows the destination",
3346 exp, func, range[0], dstsize);
3348 else if (tree_int_cst_equal (range[0], range[1]))
3349 warning_n (loc, opt, tree_to_uhwi (range[0]),
3350 "%K%qD writing %E byte into a region "
3351 "of size %E overflows the destination",
3352 "%K%qD writing %E bytes into a region "
3353 "of size %E overflows the destination",
3354 exp, func, range[0], dstsize);
3355 else if (tree_int_cst_sign_bit (range[1]))
3357 /* Avoid printing the upper bound if it's invalid. */
3358 warning_at (loc, opt,
3359 "%K%qD writing %E or more bytes into a region "
3360 "of size %E overflows the destination",
3361 exp, func, range[0], dstsize);
3363 else
3364 warning_at (loc, opt,
3365 "%K%qD writing between %E and %E bytes into "
3366 "a region of size %E overflows the destination",
3367 exp, func, range[0], range[1],
3368 dstsize);
3370 /* Return error when an overflow has been detected. */
3371 return false;
3375 /* Check the maximum length of the source sequence against the size
3376 of the destination object if known, or against the maximum size
3377 of an object. */
3378 if (maxread)
3380 get_size_range (maxread, range);
3382 /* Use the lower end for MAXREAD from now on. */
3383 if (range[0])
3384 maxread = range[0];
3386 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3388 location_t loc = tree_nonartificial_location (exp);
3389 loc = expansion_point_location_if_in_system_header (loc);
3391 if (tree_int_cst_lt (maxobjsize, range[0]))
3393 if (TREE_NO_WARNING (exp))
3394 return false;
3396 /* Warn about crazy big sizes first since that's more
3397 likely to be meaningful than saying that the bound
3398 is greater than the object size if both are big. */
3399 if (range[0] == range[1])
3400 warning_at (loc, opt,
3401 "%K%qD specified bound %E "
3402 "exceeds maximum object size %E",
3403 exp, func,
3404 range[0], maxobjsize);
3405 else
3406 warning_at (loc, opt,
3407 "%K%qD specified bound between %E and %E "
3408 "exceeds maximum object size %E",
3409 exp, func,
3410 range[0], range[1], maxobjsize);
3412 return false;
3415 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3417 if (TREE_NO_WARNING (exp))
3418 return false;
3420 if (tree_int_cst_equal (range[0], range[1]))
3421 warning_at (loc, opt,
3422 "%K%qD specified bound %E "
3423 "exceeds destination size %E",
3424 exp, func,
3425 range[0], dstsize);
3426 else
3427 warning_at (loc, opt,
3428 "%K%qD specified bound between %E and %E "
3429 "exceeds destination size %E",
3430 exp, func,
3431 range[0], range[1], dstsize);
3432 return false;
3437 /* Check for reading past the end of SRC. */
3438 if (slen
3439 && slen == srcstr
3440 && dstwrite && range[0]
3441 && tree_int_cst_lt (slen, range[0]))
3443 if (TREE_NO_WARNING (exp))
3444 return false;
3446 location_t loc = tree_nonartificial_location (exp);
3448 if (tree_int_cst_equal (range[0], range[1]))
3449 warning_n (loc, opt, tree_to_uhwi (range[0]),
3450 "%K%qD reading %E byte from a region of size %E",
3451 "%K%qD reading %E bytes from a region of size %E",
3452 exp, func, range[0], slen);
3453 else if (tree_int_cst_sign_bit (range[1]))
3455 /* Avoid printing the upper bound if it's invalid. */
3456 warning_at (loc, opt,
3457 "%K%qD reading %E or more bytes from a region "
3458 "of size %E",
3459 exp, func, range[0], slen);
3461 else
3462 warning_at (loc, opt,
3463 "%K%qD reading between %E and %E bytes from a region "
3464 "of size %E",
3465 exp, func, range[0], range[1], slen);
3466 return false;
3469 return true;
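/* Editorial example (not from the GCC sources) of the read check
   above: for

     memchr ("ab", c, 8);

   the object holding "ab" is 3 bytes but the call reads 8, so the
   "reading 8 bytes from a region of size 3" form fires and
   check_access returns false.  */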
3472 /* Helper to compute the size of the object referenced by the DEST
3473 expression which must have pointer type, using Object Size type
3474 OSTYPE (only the least significant 2 bits are used). Return
3475 an estimate of the size of the object if successful or NULL when
3476 the size cannot be determined. When the referenced object involves
3477 a non-constant offset in some range the returned value represents
3478 the largest size given the smallest non-negative offset in the
3479 range. The function is intended for diagnostics and should not
3480 be used to influence code generation or optimization. */
3482 tree
3483 compute_objsize (tree dest, int ostype)
3485 unsigned HOST_WIDE_INT size;
3487 /* Only the two least significant bits are meaningful. */
3488 ostype &= 3;
3490 if (compute_builtin_object_size (dest, ostype, &size))
3491 return build_int_cst (sizetype, size);
3493 if (TREE_CODE (dest) == SSA_NAME)
3495 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3496 if (!is_gimple_assign (stmt))
3497 return NULL_TREE;
3499 dest = gimple_assign_rhs1 (stmt);
3501 tree_code code = gimple_assign_rhs_code (stmt);
3502 if (code == POINTER_PLUS_EXPR)
3504 /* compute_builtin_object_size fails for addresses with
3505 non-constant offsets. Try to determine the range of
3506 such an offset here and use it to adjust the constant
3507 size. */
3508 tree off = gimple_assign_rhs2 (stmt);
3509 if (TREE_CODE (off) == INTEGER_CST)
3511 if (tree size = compute_objsize (dest, ostype))
3513 wide_int wioff = wi::to_wide (off);
3514 wide_int wisiz = wi::to_wide (size);
3516 /* Ignore negative offsets for now. For others,
3517 use the lower bound as the most optimistic
3518 estimate of the (remaining) size. */
3519 if (wi::sign_mask (wioff))
3521 else if (wi::ltu_p (wioff, wisiz))
3522 return wide_int_to_tree (TREE_TYPE (size),
3523 wi::sub (wisiz, wioff));
3524 else
3525 return size_zero_node;
3528 else if (TREE_CODE (off) == SSA_NAME
3529 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3531 wide_int min, max;
3532 enum value_range_type rng = get_range_info (off, &min, &max);
3534 if (rng == VR_RANGE)
3536 if (tree size = compute_objsize (dest, ostype))
3538 wide_int wisiz = wi::to_wide (size);
3540 /* Ignore negative offsets for now. For others,
3541 use the lower bound as the most optimistic
3542 estimate of the (remaining) size. */
3543 if (wi::sign_mask (min))
3545 else if (wi::ltu_p (min, wisiz))
3546 return wide_int_to_tree (TREE_TYPE (size),
3547 wi::sub (wisiz, min));
3548 else
3549 return size_zero_node;
3554 else if (code != ADDR_EXPR)
3555 return NULL_TREE;
3558 /* Unless computing the largest size (for memcpy and other raw memory
3559 functions), try to determine the size of the object from its type. */
3560 if (!ostype)
3561 return NULL_TREE;
3563 if (TREE_CODE (dest) != ADDR_EXPR)
3564 return NULL_TREE;
3566 tree type = TREE_TYPE (dest);
3567 if (TREE_CODE (type) == POINTER_TYPE)
3568 type = TREE_TYPE (type);
3570 type = TYPE_MAIN_VARIANT (type);
3572 if (TREE_CODE (type) == ARRAY_TYPE
3573 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3575 /* Return the constant size unless it's zero (that's a zero-length
3576 array likely at the end of a struct). */
3577 tree size = TYPE_SIZE_UNIT (type);
3578 if (size && TREE_CODE (size) == INTEGER_CST
3579 && !integer_zerop (size))
3580 return size;
3583 return NULL_TREE;
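/* Editorial sketch (assumed examples, not from the GCC sources) of
   the behavior described above, expressed at the source level:

     char a[8];

   compute_objsize on &a[0] with OSTYPE 1 yields 8; on a pointer known
   to equal a + 2 it yields 8 - 2 == 6; on a + i with i known to lie
   in the range [2, 5] it uses the range's lower bound and again
   yields the optimistic 8 - 2 == 6; on a + 8 or beyond it yields
   size_zero_node.  */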
3586 /* Helper to determine and check the sizes of the source and the destination
3587 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3588 call expression, DEST is the destination argument, SRC is the source
3589 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3590 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3591 (no overflow or invalid sizes), false otherwise. */
3593 static bool
3594 check_memop_access (tree exp, tree dest, tree src, tree size)
3596 /* For functions like memset and memcpy that operate on raw memory
3597 try to determine the size of the largest source and destination
3598 object using type-0 Object Size regardless of the object size
3599 type specified by the option. */
3600 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3601 tree dstsize = compute_objsize (dest, 0);
3603 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3604 srcsize, dstsize);
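/* Editorial example (not from the GCC sources): for

     char d[4];
     memcpy (d, s, 8);

   the type-0 size of D is 4 and the write range is [8, 8], so
   check_access issues "writing 8 bytes into a region of size 4
   overflows the destination" and check_memop_access returns false.  */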
3607 /* Validate memchr arguments without performing any expansion.
3608 Return NULL_RTX. */
3610 static rtx
3611 expand_builtin_memchr (tree exp, rtx)
3613 if (!validate_arglist (exp,
3614 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3615 return NULL_RTX;
3617 tree arg1 = CALL_EXPR_ARG (exp, 0);
3618 tree len = CALL_EXPR_ARG (exp, 2);
3620 /* Diagnose calls where the specified length exceeds the size
3621 of the object. */
3622 if (warn_stringop_overflow)
3624 tree size = compute_objsize (arg1, 0);
3625 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3626 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3629 return NULL_RTX;
3632 /* Expand a call EXP to the memcpy builtin.
3633 Return NULL_RTX if we failed, the caller should emit a normal call,
3634 otherwise try to get the result in TARGET, if convenient (and in
3635 mode MODE if that's convenient). */
3637 static rtx
3638 expand_builtin_memcpy (tree exp, rtx target)
3640 if (!validate_arglist (exp,
3641 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3642 return NULL_RTX;
3644 tree dest = CALL_EXPR_ARG (exp, 0);
3645 tree src = CALL_EXPR_ARG (exp, 1);
3646 tree len = CALL_EXPR_ARG (exp, 2);
3648 check_memop_access (exp, dest, src, len);
3650 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3651 /*endp=*/ 0);
3654 /* Check a call EXP to the memmove built-in for validity.
3655 Return NULL_RTX on both success and failure. */
3657 static rtx
3658 expand_builtin_memmove (tree exp, rtx)
3660 if (!validate_arglist (exp,
3661 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3662 return NULL_RTX;
3664 tree dest = CALL_EXPR_ARG (exp, 0);
3665 tree src = CALL_EXPR_ARG (exp, 1);
3666 tree len = CALL_EXPR_ARG (exp, 2);
3668 check_memop_access (exp, dest, src, len);
3670 return NULL_RTX;
3673 /* Expand a call EXP to the mempcpy builtin.
3674 Return NULL_RTX if we failed; the caller should emit a normal call,
3675 otherwise try to get the result in TARGET, if convenient (and in
3676 mode MODE if that's convenient). If ENDP is 0 return the
3677 destination pointer, if ENDP is 1 return the end pointer ala
3678 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3679 stpcpy. */
3681 static rtx
3682 expand_builtin_mempcpy (tree exp, rtx target)
3684 if (!validate_arglist (exp,
3685 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3686 return NULL_RTX;
3688 tree dest = CALL_EXPR_ARG (exp, 0);
3689 tree src = CALL_EXPR_ARG (exp, 1);
3690 tree len = CALL_EXPR_ARG (exp, 2);
3692 /* Policy does not generally allow using compute_objsize (which
3693 is used internally by check_memop_access) to change code generation
3694 or drive optimization decisions.
3696 In this instance it is safe because the code we generate has
3697 the same semantics regardless of the return value of
3698 check_memop_access. Exactly the same amount of data is copied
3699 and the return value is exactly the same in both cases.
3701 Furthermore, check_memop_access always uses mode 0 for the call to
3702 compute_objsize, so the imprecise nature of compute_objsize is
3703 avoided. */
3705 /* Avoid expanding mempcpy into memcpy when the call is determined
3706 to overflow the buffer. This also prevents the same overflow
3707 from being diagnosed again when expanding memcpy. */
3708 if (!check_memop_access (exp, dest, src, len))
3709 return NULL_RTX;
3711 return expand_builtin_mempcpy_args (dest, src, len,
3712 target, exp, /*endp=*/ 1);
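/* Editorial example (not from the GCC sources) of the ENDP
   convention: for

     char d[8];
     void *p = mempcpy (d, "abc", 4);

   ENDP is 1 and the expansion yields D + 4, the end pointer; memcpy
   uses ENDP 0 (yields D) and stpcpy uses ENDP 2 (yields D + 3, one
   before the end, i.e. the address of the copied nul).  */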
3715 /* Helper function to do the actual work for expand of memory copy family
3716 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3717 of memory from SRC to DEST and assign to TARGET if convenient.
3718 If ENDP is 0 return the
3719 destination pointer, if ENDP is 1 return the end pointer ala
3720 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3721 stpcpy. */
3723 static rtx
3724 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3725 rtx target, tree exp, int endp)
3727 const char *src_str;
3728 unsigned int src_align = get_pointer_alignment (src);
3729 unsigned int dest_align = get_pointer_alignment (dest);
3730 rtx dest_mem, src_mem, dest_addr, len_rtx;
3731 HOST_WIDE_INT expected_size = -1;
3732 unsigned int expected_align = 0;
3733 unsigned HOST_WIDE_INT min_size;
3734 unsigned HOST_WIDE_INT max_size;
3735 unsigned HOST_WIDE_INT probable_max_size;
3737 /* If DEST is not a pointer type, call the normal function. */
3738 if (dest_align == 0)
3739 return NULL_RTX;
3741 /* If either SRC is not a pointer type, don't do this
3742 operation in-line. */
3743 if (src_align == 0)
3744 return NULL_RTX;
3746 if (currently_expanding_gimple_stmt)
3747 stringop_block_profile (currently_expanding_gimple_stmt,
3748 &expected_align, &expected_size);
3750 if (expected_align < dest_align)
3751 expected_align = dest_align;
3752 dest_mem = get_memory_rtx (dest, len);
3753 set_mem_align (dest_mem, dest_align);
3754 len_rtx = expand_normal (len);
3755 determine_block_size (len, len_rtx, &min_size, &max_size,
3756 &probable_max_size);
3757 src_str = c_getstr (src);
3759 /* If SRC is a string constant and block move would be done
3760 by pieces, we can avoid loading the string from memory
3761 and only store the computed constants. */
3762 if (src_str
3763 && CONST_INT_P (len_rtx)
3764 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3765 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3766 CONST_CAST (char *, src_str),
3767 dest_align, false))
3769 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3770 builtin_memcpy_read_str,
3771 CONST_CAST (char *, src_str),
3772 dest_align, false, endp);
3773 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3774 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3775 return dest_mem;
3778 src_mem = get_memory_rtx (src, len);
3779 set_mem_align (src_mem, src_align);
3781 /* Copy word part most expediently. */
3782 enum block_op_methods method = BLOCK_OP_NORMAL;
3783 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3784 method = BLOCK_OP_TAILCALL;
3785 if (endp == 1 && target != const0_rtx)
3786 method = BLOCK_OP_NO_LIBCALL_RET;
3787 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3788 expected_align, expected_size,
3789 min_size, max_size, probable_max_size);
3790 if (dest_addr == pc_rtx)
3791 return NULL_RTX;
3793 if (dest_addr == 0)
3795 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3796 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3799 if (endp && target != const0_rtx)
3801 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3802 /* stpcpy pointer to last byte. */
3803 if (endp == 2)
3804 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3807 return dest_addr;
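/* Editorial sketch (not from the GCC sources) of the string-constant
   fast path above: for

     char d[8];
     memcpy (d, "abcdefg", 8);

   LEN_RTX is the constant 8 and SRC_STR is "abcdefg", so when
   can_store_by_pieces agrees, the bytes (including the trailing nul)
   are emitted as immediate stores and SRC is never loaded.  */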
3810 static rtx
3811 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3812 rtx target, tree orig_exp, int endp)
3814 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3815 endp);
3818 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3819 we failed, the caller should emit a normal call, otherwise try to
3820 get the result in TARGET, if convenient. If ENDP is 0 return the
3821 destination pointer, if ENDP is 1 return the end pointer ala
3822 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3823 stpcpy. */
3825 static rtx
3826 expand_movstr (tree dest, tree src, rtx target, int endp)
3828 struct expand_operand ops[3];
3829 rtx dest_mem;
3830 rtx src_mem;
3832 if (!targetm.have_movstr ())
3833 return NULL_RTX;
3835 dest_mem = get_memory_rtx (dest, NULL);
3836 src_mem = get_memory_rtx (src, NULL);
3837 if (!endp)
3839 target = force_reg (Pmode, XEXP (dest_mem, 0));
3840 dest_mem = replace_equiv_address (dest_mem, target);
3843 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3844 create_fixed_operand (&ops[1], dest_mem);
3845 create_fixed_operand (&ops[2], src_mem);
3846 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3847 return NULL_RTX;
3849 if (endp && target != const0_rtx)
3851 target = ops[0].value;
3852 /* movstr is supposed to set end to the address of the NUL
3853 terminator. If the caller requested a mempcpy-like return value,
3854 adjust it. */
3855 if (endp == 1)
3857 rtx tem = plus_constant (GET_MODE (target),
3858 gen_lowpart (GET_MODE (target), target), 1);
3859 emit_move_insn (target, force_operand (tem, NULL_RTX));
3862 return target;
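/* Editorial note: the movstr pattern leaves its output pointing at
   the copied nul, i.e. DEST + strlen (SRC), which is exactly the
   ENDP == 2 result; only ENDP == 1 (mempcpy-like DEST + strlen (SRC)
   + 1) needs the plus_constant adjustment above.  */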
3865 /* Do some very basic size validation of a call to the strcat builtin
3866 given by EXP. Return NULL_RTX to have the built-in expand to a call
3867 to the library function. */
3869 static rtx
3870 expand_builtin_strcat (tree exp, rtx)
3872 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3873 || !warn_stringop_overflow)
3874 return NULL_RTX;
3876 tree dest = CALL_EXPR_ARG (exp, 0);
3877 tree src = CALL_EXPR_ARG (exp, 1);
3879 /* There is no way here to determine the length of the string in
3880 the destination to which the SRC string is being appended, so
3881 just diagnose cases when the source string is longer than
3882 the destination object. */
3884 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3886 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3887 destsize);
3889 return NULL_RTX;
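/* Editorial example (not from the GCC sources): for

     char d[4];
     strcat (d, "overflow");

   the source needs 9 bytes including its nul, more than the 4-byte
   destination can hold regardless of what D already contains, so the
   check above diagnoses the call.  */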
3892 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3893 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3894 try to get the result in TARGET, if convenient (and in mode MODE if that's
3895 convenient). */
3897 static rtx
3898 expand_builtin_strcpy (tree exp, rtx target)
3900 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3901 return NULL_RTX;
3903 tree dest = CALL_EXPR_ARG (exp, 0);
3904 tree src = CALL_EXPR_ARG (exp, 1);
3906 if (warn_stringop_overflow)
3908 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3909 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3910 src, destsize);
3913 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
3915 /* Check to see if the argument was declared attribute nonstring
3916 and if so, issue a warning since at this point it's not known
3917 to be nul-terminated. */
3918 tree fndecl = get_callee_fndecl (exp);
3919 maybe_warn_nonstring_arg (fndecl, exp);
3920 return ret;
3923 return NULL_RTX;
3926 /* Helper function to do the actual work for expand_builtin_strcpy. The
3927 arguments to the builtin_strcpy call DEST and SRC are broken out
3928 so that this can also be called without constructing an actual CALL_EXPR.
3929 The other arguments and return value are the same as for
3930 expand_builtin_strcpy. */
3932 static rtx
3933 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
3935 /* Detect strcpy calls with unterminated arrays. */
3936 if (tree nonstr = unterminated_array (src))
3938 /* NONSTR refers to the non-nul terminated constant array. */
3939 if (!TREE_NO_WARNING (exp))
3940 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
3941 return NULL_RTX;
3944 return expand_movstr (dest, src, target, /*endp=*/0);
3947 /* Expand a call EXP to the stpcpy builtin.
3948 Return NULL_RTX if we failed; the caller should emit a normal call,
3949 otherwise try to get the result in TARGET, if convenient (and in
3950 mode MODE if that's convenient). */
3952 static rtx
3953 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3955 tree dst, src;
3956 location_t loc = EXPR_LOCATION (exp);
3958 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3959 return NULL_RTX;
3961 dst = CALL_EXPR_ARG (exp, 0);
3962 src = CALL_EXPR_ARG (exp, 1);
3964 if (warn_stringop_overflow)
3966 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3967 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3968 src, destsize);
3971 /* If return value is ignored, transform stpcpy into strcpy. */
3972 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3974 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3975 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3976 return expand_expr (result, target, mode, EXPAND_NORMAL);
3978 else
3980 tree len, lenp1;
3981 rtx ret;
3983 /* Ensure we get an actual string whose length can be evaluated at
3984 compile-time, not an expression containing a string. This is
3985 because the latter will potentially produce pessimized code
3986 when used to produce the return value. */
3987 tree nonstr = NULL_TREE;
3988 if (!c_getstr (src, NULL)
3989 || !(len = c_strlen (src, 0, &nonstr, 1)))
3990 return expand_movstr (dst, src, target, /*endp=*/2);
3992 if (nonstr && !TREE_NO_WARNING (exp))
3993 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, nonstr);
3995 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3996 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3997 target, exp, /*endp=*/2);
3999 if (ret)
4000 return ret;
4002 if (TREE_CODE (len) == INTEGER_CST)
4004 rtx len_rtx = expand_normal (len);
4006 if (CONST_INT_P (len_rtx))
4008 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4010 if (ret)
4012 if (! target)
4014 if (mode != VOIDmode)
4015 target = gen_reg_rtx (mode);
4016 else
4017 target = gen_reg_rtx (GET_MODE (ret));
4019 if (GET_MODE (target) != GET_MODE (ret))
4020 ret = gen_lowpart (GET_MODE (target), ret);
4022 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4023 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4024 gcc_assert (ret);
4026 return target;
4031 return expand_movstr (dst, src, target, /*endp=*/2);
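/* Editorial example (not from the GCC sources) of the constant-length
   path above: for

     char d[8];
     char *p = stpcpy (d, "abc");

   LEN is 3 and LENP1 is 4, so the call becomes a 4-byte mempcpy-style
   copy with ENDP == 2 and P is D + 3, the address of the copied nul.  */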
4035 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4036 arguments while being careful to avoid duplicate warnings (which could
4037 be issued if the expander were to expand the call, resulting in it
4038 being emitted in expand_call ()). */
4040 static rtx
4041 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4043 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4045 /* The call has been successfully expanded. Check for nonstring
4046 arguments and issue warnings as appropriate. */
4047 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4048 return ret;
4051 return NULL_RTX;
4054 /* Check a call EXP to the stpncpy built-in for validity.
4055 Return NULL_RTX on both success and failure. */
4057 static rtx
4058 expand_builtin_stpncpy (tree exp, rtx)
4060 if (!validate_arglist (exp,
4061 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4062 || !warn_stringop_overflow)
4063 return NULL_RTX;
4065 /* The source and destination of the call. */
4066 tree dest = CALL_EXPR_ARG (exp, 0);
4067 tree src = CALL_EXPR_ARG (exp, 1);
4069 /* The exact number of bytes to write (not the maximum). */
4070 tree len = CALL_EXPR_ARG (exp, 2);
4072 /* The size of the destination object. */
4073 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4075 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4077 return NULL_RTX;
4080 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4081 bytes from constant string DATA + OFFSET and return it as target
4082 constant. */
4084 static rtx
4085 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4086 scalar_int_mode mode)
4088 const char *str = (const char *) data;
4090 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4091 return const0_rtx;
4093 return c_readstr (str + offset, mode);
4096 /* Helper to check the sizes of sequences and the destination of calls
4097 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4098 success (no overflow or invalid sizes), false otherwise. */
4100 static bool
4101 check_strncat_sizes (tree exp, tree objsize)
4103 tree dest = CALL_EXPR_ARG (exp, 0);
4104 tree src = CALL_EXPR_ARG (exp, 1);
4105 tree maxread = CALL_EXPR_ARG (exp, 2);
4107 /* Try to determine the range of lengths that the source expression
4108 refers to. */
4109 tree lenrange[2];
4110 get_range_strlen (src, lenrange);
4112 /* Try to verify that the destination is big enough for the shortest
4113 string. */
4115 if (!objsize && warn_stringop_overflow)
4117 /* If it hasn't been provided by __strncat_chk, try to determine
4118 the size of the destination object into which the source is
4119 being copied. */
4120 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4123 /* Add one for the terminating nul. */
4124 tree srclen = (lenrange[0]
4125 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4126 size_one_node)
4127 : NULL_TREE);
4129 /* The strncat function copies at most MAXREAD bytes and always appends
4130 the terminating nul so the specified upper bound should never be equal
4131 to (or greater than) the size of the destination. */
4132 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4133 && tree_int_cst_equal (objsize, maxread))
4135 location_t loc = tree_nonartificial_location (exp);
4136 loc = expansion_point_location_if_in_system_header (loc);
4138 warning_at (loc, OPT_Wstringop_overflow_,
4139 "%K%qD specified bound %E equals destination size",
4140 exp, get_callee_fndecl (exp), maxread);
4142 return false;
4145 if (!srclen
4146 || (maxread && tree_fits_uhwi_p (maxread)
4147 && tree_fits_uhwi_p (srclen)
4148 && tree_int_cst_lt (maxread, srclen)))
4149 srclen = maxread;
4151 /* The number of bytes to write is LEN but check_access will also
4152 check SRCLEN if LEN's value isn't known. */
4153 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4154 objsize);
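/* Editorial example (not from the GCC sources): for

     char d[8];
     strncat (d, s, sizeof d);

   MAXREAD equals the destination size, so the "specified bound %E
   equals destination size" warning above fires; since strncat always
   appends a nul, a safe bound is sizeof d - strlen (d) - 1.  */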
4157 /* Similar to expand_builtin_strcat, do some very basic size validation
4158 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4159 the built-in expand to a call to the library function. */
4161 static rtx
4162 expand_builtin_strncat (tree exp, rtx)
4164 if (!validate_arglist (exp,
4165 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4166 || !warn_stringop_overflow)
4167 return NULL_RTX;
4169 tree dest = CALL_EXPR_ARG (exp, 0);
4170 tree src = CALL_EXPR_ARG (exp, 1);
4171 /* The upper bound on the number of bytes to copy from SRC. */
4172 tree maxread = CALL_EXPR_ARG (exp, 2);
4173 /* The length of the source sequence. */
4174 tree slen = c_strlen (src, 1);
4176 /* Try to determine the range of lengths that the source expression
4177 refers to. */
4178 tree lenrange[2];
4179 if (slen)
4180 lenrange[0] = lenrange[1] = slen;
4181 else
4182 get_range_strlen (src, lenrange);
4184 /* Try to verify that the destination is big enough for the shortest
4185 string. First try to determine the size of the destination object
4186 into which the source is being copied. */
4187 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4189 /* Add one for the terminating nul. */
4190 tree srclen = (lenrange[0]
4191 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4192 size_one_node)
4193 : NULL_TREE);
4195 /* The strncat function copies at most MAXREAD bytes and always appends
4196 the terminating nul so the specified upper bound should never be equal
4197 to (or greater than) the size of the destination. */
4198 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4199 && tree_int_cst_equal (destsize, maxread))
4201 location_t loc = tree_nonartificial_location (exp);
4202 loc = expansion_point_location_if_in_system_header (loc);
4204 warning_at (loc, OPT_Wstringop_overflow_,
4205 "%K%qD specified bound %E equals destination size",
4206 exp, get_callee_fndecl (exp), maxread);
4208 return NULL_RTX;
4211 if (!srclen
4212 || (maxread && tree_fits_uhwi_p (maxread)
4213 && tree_fits_uhwi_p (srclen)
4214 && tree_int_cst_lt (maxread, srclen)))
4215 srclen = maxread;
4217 /* The number of bytes to write is SRCLEN. */
4218 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4220 return NULL_RTX;
4223 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4224 NULL_RTX if we failed; the caller should emit a normal call. */
4226 static rtx
4227 expand_builtin_strncpy (tree exp, rtx target)
4229 location_t loc = EXPR_LOCATION (exp);
4231 if (validate_arglist (exp,
4232 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4234 tree dest = CALL_EXPR_ARG (exp, 0);
4235 tree src = CALL_EXPR_ARG (exp, 1);
4236 /* The number of bytes to write (not the maximum). */
4237 tree len = CALL_EXPR_ARG (exp, 2);
4238 /* The length of the source sequence. */
4239 tree slen = c_strlen (src, 1);
4241 if (warn_stringop_overflow)
4243 tree destsize = compute_objsize (dest,
4244 warn_stringop_overflow - 1);
4246 /* The number of bytes to write is LEN but check_access will also
4247 check SLEN if LEN's value isn't known. */
4248 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4249 destsize);
4252 /* We must be passed a constant len and src parameter. */
4253 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4254 return NULL_RTX;
4256 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4258 /* We're required to pad with trailing zeros if the requested
4259 len is greater than strlen(s2)+1. In that case try to
4260 use store_by_pieces; if it fails, punt. */
4261 if (tree_int_cst_lt (slen, len))
4263 unsigned int dest_align = get_pointer_alignment (dest);
4264 const char *p = c_getstr (src);
4265 rtx dest_mem;
4267 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4268 || !can_store_by_pieces (tree_to_uhwi (len),
4269 builtin_strncpy_read_str,
4270 CONST_CAST (char *, p),
4271 dest_align, false))
4272 return NULL_RTX;
4274 dest_mem = get_memory_rtx (dest, len);
4275 store_by_pieces (dest_mem, tree_to_uhwi (len),
4276 builtin_strncpy_read_str,
4277 CONST_CAST (char *, p), dest_align, false, 0);
4278 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4279 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4280 return dest_mem;
4283 return NULL_RTX;
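/* Editorial example (not from the GCC sources) of the padding path
   above: for

     char d[8];
     strncpy (d, "ab", 8);

   SLEN + 1 == 3 is less than LEN == 8, so the expansion must store
   "ab" plus six nuls; builtin_strncpy_read_str returns zeros for
   offsets past strlen (SRC), so store_by_pieces can emit the whole
   padded pattern from constants.  */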
4286 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4287 bytes from constant string DATA + OFFSET and return it as target
4288 constant. */
4290 static rtx
4291 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4292 scalar_int_mode mode)
4294 const char *c = (const char *) data;
4295 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4297 memset (p, *c, GET_MODE_SIZE (mode));
4299 return c_readstr (p, mode);
4302 /* Callback routine for store_by_pieces. Return the RTL of a register
4303 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4304 char value given in the RTL register data. For example, if mode is
4305 4 bytes wide, return the RTL for 0x01010101*data. */
4307 static rtx
4308 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4309 scalar_int_mode mode)
4311 rtx target, coeff;
4312 size_t size;
4313 char *p;
4315 size = GET_MODE_SIZE (mode);
4316 if (size == 1)
4317 return (rtx) data;
4319 p = XALLOCAVEC (char, size);
4320 memset (p, 1, size);
4321 coeff = c_readstr (p, mode);
4323 target = convert_to_mode (mode, (rtx) data, 1);
4324 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4325 return force_reg (mode, target);
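/* Editorial example (not from the GCC sources): in a 4-byte mode with
   DATA holding the byte 0xab, COEFF reads as 0x01010101, and the
   multiply yields 0xab * 0x01010101 == 0xabababab, i.e. four copies
   of the byte, as the comment above describes.  */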
4328 /* Expand expression EXP, which is a call to the memset builtin. Return
4329 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4330 try to get the result in TARGET, if convenient (and in mode MODE if that's
4331 convenient). */
4333 static rtx
4334 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4336 if (!validate_arglist (exp,
4337 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4338 return NULL_RTX;
4340 tree dest = CALL_EXPR_ARG (exp, 0);
4341 tree val = CALL_EXPR_ARG (exp, 1);
4342 tree len = CALL_EXPR_ARG (exp, 2);
4344 check_memop_access (exp, dest, NULL_TREE, len);
4346 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4349 /* Helper function to do the actual work for expand_builtin_memset. The
4350 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4351 so that this can also be called without constructing an actual CALL_EXPR.
4352 The other arguments and return value are the same as for
4353 expand_builtin_memset. */
4355 static rtx
4356 expand_builtin_memset_args (tree dest, tree val, tree len,
4357 rtx target, machine_mode mode, tree orig_exp)
4359 tree fndecl, fn;
4360 enum built_in_function fcode;
4361 machine_mode val_mode;
4362 char c;
4363 unsigned int dest_align;
4364 rtx dest_mem, dest_addr, len_rtx;
4365 HOST_WIDE_INT expected_size = -1;
4366 unsigned int expected_align = 0;
4367 unsigned HOST_WIDE_INT min_size;
4368 unsigned HOST_WIDE_INT max_size;
4369 unsigned HOST_WIDE_INT probable_max_size;
4371 dest_align = get_pointer_alignment (dest);
4373 /* If DEST is not a pointer type, don't do this operation in-line. */
4374 if (dest_align == 0)
4375 return NULL_RTX;
4377 if (currently_expanding_gimple_stmt)
4378 stringop_block_profile (currently_expanding_gimple_stmt,
4379 &expected_align, &expected_size);
4381 if (expected_align < dest_align)
4382 expected_align = dest_align;
4384 /* If the LEN parameter is zero, return DEST. */
4385 if (integer_zerop (len))
4387 /* Evaluate and ignore VAL in case it has side-effects. */
4388 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4389 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4392 /* Stabilize the arguments in case we fail. */
4393 dest = builtin_save_expr (dest);
4394 val = builtin_save_expr (val);
4395 len = builtin_save_expr (len);
4397 len_rtx = expand_normal (len);
4398 determine_block_size (len, len_rtx, &min_size, &max_size,
4399 &probable_max_size);
4400 dest_mem = get_memory_rtx (dest, len);
4401 val_mode = TYPE_MODE (unsigned_char_type_node);
4403 if (TREE_CODE (val) != INTEGER_CST)
4405 rtx val_rtx;
4407 val_rtx = expand_normal (val);
4408 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4410 /* Assume that we can memset by pieces if we can store
4411 the coefficients by pieces (in the required modes).
4412 We can't pass builtin_memset_gen_str as that emits RTL. */
4413 c = 1;
4414 if (tree_fits_uhwi_p (len)
4415 && can_store_by_pieces (tree_to_uhwi (len),
4416 builtin_memset_read_str, &c, dest_align,
4417 true))
4419 val_rtx = force_reg (val_mode, val_rtx);
4420 store_by_pieces (dest_mem, tree_to_uhwi (len),
4421 builtin_memset_gen_str, val_rtx, dest_align,
4422 true, 0);
4424 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4425 dest_align, expected_align,
4426 expected_size, min_size, max_size,
4427 probable_max_size))
4428 goto do_libcall;
4430 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4431 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4432 return dest_mem;
4435 if (target_char_cast (val, &c))
4436 goto do_libcall;
4438 if (c)
4440 if (tree_fits_uhwi_p (len)
4441 && can_store_by_pieces (tree_to_uhwi (len),
4442 builtin_memset_read_str, &c, dest_align,
4443 true))
4444 store_by_pieces (dest_mem, tree_to_uhwi (len),
4445 builtin_memset_read_str, &c, dest_align, true, 0);
4446 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4447 gen_int_mode (c, val_mode),
4448 dest_align, expected_align,
4449 expected_size, min_size, max_size,
4450 probable_max_size))
4451 goto do_libcall;
4453 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4454 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4455 return dest_mem;
4458 set_mem_align (dest_mem, dest_align);
4459 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4460 CALL_EXPR_TAILCALL (orig_exp)
4461 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4462 expected_align, expected_size,
4463 min_size, max_size,
4464 probable_max_size);
4466 if (dest_addr == 0)
4468 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4469 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4472 return dest_addr;
4474 do_libcall:
4475 fndecl = get_callee_fndecl (orig_exp);
4476 fcode = DECL_FUNCTION_CODE (fndecl);
4477 if (fcode == BUILT_IN_MEMSET)
4478 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4479 dest, val, len);
4480 else if (fcode == BUILT_IN_BZERO)
4481 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4482 dest, len);
4483 else
4484 gcc_unreachable ();
4485 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4486 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4487 return expand_call (fn, target, target == const0_rtx);
4490 /* Expand expression EXP, which is a call to the bzero builtin. Return
4491 NULL_RTX if we failed; the caller should emit a normal call. */
4493 static rtx
4494 expand_builtin_bzero (tree exp)
4496 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4497 return NULL_RTX;
4499 tree dest = CALL_EXPR_ARG (exp, 0);
4500 tree size = CALL_EXPR_ARG (exp, 1);
4502 check_memop_access (exp, dest, NULL_TREE, size);
4504 /* New argument list transforming bzero(ptr x, int y) to
4505 memset(ptr x, int 0, size_t y). This is done this way
4506 so that if it isn't expanded inline, we fall back to
4507 calling bzero instead of memset. */
4509 location_t loc = EXPR_LOCATION (exp);
4511 return expand_builtin_memset_args (dest, integer_zero_node,
4512 fold_convert_loc (loc,
4513 size_type_node, size),
4514 const0_rtx, VOIDmode, exp);
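/* Editorial example (not from the GCC sources): bzero (p, n) is
   expanded as memset (p, 0, (size_t) n), but ORIG_EXP still carries
   the bzero fndecl, so if expansion punts, the do_libcall path above
   rebuilds a two-argument bzero call rather than a memset call.  */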
4517 /* Try to expand cmpstr operation ICODE with the given operands.
4518 Return the result rtx on success, otherwise return null. */
4520 static rtx
4521 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4522 HOST_WIDE_INT align)
4524 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4526 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4527 target = NULL_RTX;
4529 struct expand_operand ops[4];
4530 create_output_operand (&ops[0], target, insn_mode);
4531 create_fixed_operand (&ops[1], arg1_rtx);
4532 create_fixed_operand (&ops[2], arg2_rtx);
4533 create_integer_operand (&ops[3], align);
4534 if (maybe_expand_insn (icode, 4, ops))
4535 return ops[0].value;
4536 return NULL_RTX;
4539 /* Expand expression EXP, which is a call to the memcmp built-in function.
4540 Return NULL_RTX if we failed and the caller should emit a normal call,
4541 otherwise try to get the result in TARGET, if convenient.
4542 RESULT_EQ is true if we can relax the returned value to be either zero
4543 or nonzero, without caring about the sign. */
4545 static rtx
4546 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4548 if (!validate_arglist (exp,
4549 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4552 tree arg1 = CALL_EXPR_ARG (exp, 0);
4553 tree arg2 = CALL_EXPR_ARG (exp, 1);
4554 tree len = CALL_EXPR_ARG (exp, 2);
4555 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4556 bool no_overflow = true;
4558 /* Diagnose calls where the specified length exceeds the size of either
4559 object. */
4560 tree size = compute_objsize (arg1, 0);
4561 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4562 len, /*maxread=*/NULL_TREE, size,
4563 /*objsize=*/NULL_TREE);
4564 if (no_overflow)
4566 size = compute_objsize (arg2, 0);
4567 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4568 len, /*maxread=*/NULL_TREE, size,
4569 /*objsize=*/NULL_TREE);
4572 /* If the specified length exceeds the size of either object,
4573 call the function. */
4574 if (!no_overflow)
4575 return NULL_RTX;
4577 /* Due to the performance benefit, always inline the calls first
4578 when result_eq is false. */
4579 rtx result = NULL_RTX;
4581 if (!result_eq && fcode != BUILT_IN_BCMP)
4583 result = inline_expand_builtin_string_cmp (exp, target);
4584 if (result)
4585 return result;
4588 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4589 location_t loc = EXPR_LOCATION (exp);
4591 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4592 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4594 /* If we don't have POINTER_TYPE, call the function. */
4595 if (arg1_align == 0 || arg2_align == 0)
4596 return NULL_RTX;
4598 rtx arg1_rtx = get_memory_rtx (arg1, len);
4599 rtx arg2_rtx = get_memory_rtx (arg2, len);
4600 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4602 /* Set MEM_SIZE as appropriate. */
4603 if (CONST_INT_P (len_rtx))
4605 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4606 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4609 by_pieces_constfn constfn = NULL;
4611 const char *src_str = c_getstr (arg2);
4612 if (result_eq && src_str == NULL)
4614 src_str = c_getstr (arg1);
4615 if (src_str != NULL)
4616 std::swap (arg1_rtx, arg2_rtx);
4619 /* If SRC is a string constant and the block comparison would be
4620 done by pieces, we can avoid loading the string from memory
4621 and only use the computed constants. */
4622 if (src_str
4623 && CONST_INT_P (len_rtx)
4624 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4625 constfn = builtin_memcpy_read_str;
4627 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4628 TREE_TYPE (len), target,
4629 result_eq, constfn,
4630 CONST_CAST (char *, src_str));
4632 if (result)
4634 /* Return the value in the proper mode for this function. */
4635 if (GET_MODE (result) == mode)
4636 return result;
4638 if (target != 0)
4640 convert_move (target, result, 0);
4641 return target;
4644 return convert_to_mode (mode, result, 0);
4647 return NULL_RTX;
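/* Editorial example (not from the GCC sources): for a use such as

     if (memcmp (a, b, n) == 0)

   RESULT_EQ is true, so emit_block_cmp_hints only needs a
   zero/nonzero answer and may use a cheaper equality-only sequence;
   when the sign of the result matters, RESULT_EQ is false and the
   inline expansion via inline_expand_builtin_string_cmp is tried
   first.  */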
4650 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4651 if we failed; the caller should emit a normal call, otherwise try to get
4652 the result in TARGET, if convenient. */
4654 static rtx
4655 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4657 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4660 /* Due to the performance benefit, always inline the calls first. */
4661 rtx result = NULL_RTX;
4662 result = inline_expand_builtin_string_cmp (exp, target);
4663 if (result)
4664 return result;
4666 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4667 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4668 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4669 return NULL_RTX;
4671 tree arg1 = CALL_EXPR_ARG (exp, 0);
4672 tree arg2 = CALL_EXPR_ARG (exp, 1);
4674 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4675 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4677 /* If we don't have POINTER_TYPE, call the function. */
4678 if (arg1_align == 0 || arg2_align == 0)
4679 return NULL_RTX;
4681 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4682 arg1 = builtin_save_expr (arg1);
4683 arg2 = builtin_save_expr (arg2);
4685 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4686 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4688 /* Try to call cmpstrsi. */
4689 if (cmpstr_icode != CODE_FOR_nothing)
4690 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4691 MIN (arg1_align, arg2_align));
4693 /* Try to determine at least one length and call cmpstrnsi. */
4694 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4696 tree len;
4697 rtx arg3_rtx;
4699 tree len1 = c_strlen (arg1, 1);
4700 tree len2 = c_strlen (arg2, 1);
4702 if (len1)
4703 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4704 if (len2)
4705 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4707 /* If we don't have a constant length for the first, use the length
4708 of the second, if we know it. We don't require a constant for
4709 this case; some cost analysis could be done if both are available
4710 but neither is constant. For now, assume they're equally cheap,
4711 unless one has side effects. If both strings have constant lengths,
4712 use the smaller. */
4714 if (!len1)
4715 len = len2;
4716 else if (!len2)
4717 len = len1;
4718 else if (TREE_SIDE_EFFECTS (len1))
4719 len = len2;
4720 else if (TREE_SIDE_EFFECTS (len2))
4721 len = len1;
4722 else if (TREE_CODE (len1) != INTEGER_CST)
4723 len = len2;
4724 else if (TREE_CODE (len2) != INTEGER_CST)
4725 len = len1;
4726 else if (tree_int_cst_lt (len1, len2))
4727 len = len1;
4728 else
4729 len = len2;
4731 /* If both arguments have side effects, we cannot optimize. */
4732 if (len && !TREE_SIDE_EFFECTS (len))
4734 arg3_rtx = expand_normal (len);
4735 result = expand_cmpstrn_or_cmpmem
4736 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4737 arg3_rtx, MIN (arg1_align, arg2_align));
4741 tree fndecl = get_callee_fndecl (exp);
4742 if (result)
4744 /* Check to see if the argument was declared attribute nonstring
4745 and if so, issue a warning since at this point it's not known
4746 to be nul-terminated. */
4747 maybe_warn_nonstring_arg (fndecl, exp);
4749 /* Return the value in the proper mode for this function. */
4750 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4751 if (GET_MODE (result) == mode)
4752 return result;
4753 if (target == 0)
4754 return convert_to_mode (mode, result, 0);
4755 convert_move (target, result, 0);
4756 return target;
4759 /* Expand the library call ourselves using a stabilized argument
4760 list to avoid re-evaluating the function's arguments twice. */
4761 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4762 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4763 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4764 return expand_call (fn, target, target == const0_rtx);
4767 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4768 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4769 the result in TARGET, if convenient. */
4771 static rtx
4772 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4773 ATTRIBUTE_UNUSED machine_mode mode)
4775 if (!validate_arglist (exp,
4776 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4777 return NULL_RTX;
4779 /* Due to the performance benefit, always inline the calls first. */
4780 rtx result = NULL_RTX;
4781 result = inline_expand_builtin_string_cmp (exp, target);
4782 if (result)
4783 return result;
4785 /* If c_strlen can determine an expression for one of the string
4786 lengths, and it doesn't have side effects, then emit cmpstrnsi
4787 using length MIN(strlen(string)+1, arg3). */
4788 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4789 if (cmpstrn_icode == CODE_FOR_nothing)
4790 return NULL_RTX;
4792 tree len;
4794 tree arg1 = CALL_EXPR_ARG (exp, 0);
4795 tree arg2 = CALL_EXPR_ARG (exp, 1);
4796 tree arg3 = CALL_EXPR_ARG (exp, 2);
4798 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4799 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4801 tree len1 = c_strlen (arg1, 1);
4802 tree len2 = c_strlen (arg2, 1);
4804 location_t loc = EXPR_LOCATION (exp);
4806 if (len1)
4807 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4808 if (len2)
4809 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4811 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4813 /* If we don't have a constant length for the first, use the length
4814 of the second, if we know it. If neither string is constant length,
4815 use the given length argument. We don't require a constant for
4816 this case; some cost analysis could be done if both are available
4817 but neither is constant. For now, assume they're equally cheap,
4818 unless one has side effects. If both strings have constant lengths,
4819 use the smaller. */
4821 if (!len1 && !len2)
4822 len = len3;
4823 else if (!len1)
4824 len = len2;
4825 else if (!len2)
4826 len = len1;
4827 else if (TREE_SIDE_EFFECTS (len1))
4828 len = len2;
4829 else if (TREE_SIDE_EFFECTS (len2))
4830 len = len1;
4831 else if (TREE_CODE (len1) != INTEGER_CST)
4832 len = len2;
4833 else if (TREE_CODE (len2) != INTEGER_CST)
4834 len = len1;
4835 else if (tree_int_cst_lt (len1, len2))
4836 len = len1;
4837 else
4838 len = len2;
4840 /* If we are not using the given length, we must incorporate it here.
4841 The actual new length parameter will be MIN(len,arg3) in this case. */
4842 if (len != len3)
4844 len = fold_convert_loc (loc, sizetype, len);
4845 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4847 rtx arg1_rtx = get_memory_rtx (arg1, len);
4848 rtx arg2_rtx = get_memory_rtx (arg2, len);
4849 rtx arg3_rtx = expand_normal (len);
4850 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4851 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4852 MIN (arg1_align, arg2_align));
4854 tree fndecl = get_callee_fndecl (exp);
4855 if (result)
4857 /* Check to see if the argument was declared attribute nonstring
4858 and if so, issue a warning since at this point it's not known
4859 to be nul-terminated. */
4860 maybe_warn_nonstring_arg (fndecl, exp);
4862 /* Return the value in the proper mode for this function. */
4863 mode = TYPE_MODE (TREE_TYPE (exp));
4864 if (GET_MODE (result) == mode)
4865 return result;
4866 if (target == 0)
4867 return convert_to_mode (mode, result, 0);
4868 convert_move (target, result, 0);
4869 return target;
4872 /* Expand the library call ourselves using a stabilized argument
4873 list to avoid re-evaluating the function's arguments twice. */
4874 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4875 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4876 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4877 return expand_call (fn, target, target == const0_rtx);
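/* Editorial example (not from the GCC sources) of the length
   selection above: for strncmp (s, "abc", 8), LEN2 is
   strlen ("abc") + 1 == 4 and LEN3 is 8; since the chosen LEN differs
   from LEN3 the emitted length is MIN (4, 8) == 4, which is safe
   because equality through the nul determines the result for any
   longer bound.  */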
4880 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4881 if that's convenient. */
4883 rtx
4884 expand_builtin_saveregs (void)
4886 rtx val;
4887 rtx_insn *seq;
4889 /* Don't do __builtin_saveregs more than once in a function.
4890 Save the result of the first call and reuse it. */
4891 if (saveregs_value != 0)
4892 return saveregs_value;
4894 /* When this function is called, it means that registers must be
4895 saved on entry to this function. So we migrate the call to the
4896 first insn of this function. */
4898 start_sequence ();
4900 /* Do whatever the machine needs done in this case. */
4901 val = targetm.calls.expand_builtin_saveregs ();
4903 seq = get_insns ();
4904 end_sequence ();
4906 saveregs_value = val;
4908 /* Put the insns after the NOTE that starts the function. If this
4909 is inside a start_sequence, make the outer-level insn chain current, so
4910 the code is placed at the start of the function. */
4911 push_topmost_sequence ();
4912 emit_insn_after (seq, entry_of_function ());
4913 pop_topmost_sequence ();
4915 return val;
4918 /* Expand a call to __builtin_next_arg. */
4920 static rtx
4921 expand_builtin_next_arg (void)
4923 /* Checking arguments is already done in fold_builtin_next_arg
4924 that must be called before this function. */
4925 return expand_binop (ptr_mode, add_optab,
4926 crtl->args.internal_arg_pointer,
4927 crtl->args.arg_offset_rtx,
4928 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4931 /* Make it easier for the backends by protecting the valist argument
4932 from multiple evaluations. */
4934 static tree
4935 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4937 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4939 /* The current way of determining the type of valist is completely
4940 bogus. We should have the information on the va builtin instead. */
4941 if (!vatype)
4942 vatype = targetm.fn_abi_va_list (cfun->decl);
4944 if (TREE_CODE (vatype) == ARRAY_TYPE)
4946 if (TREE_SIDE_EFFECTS (valist))
4947 valist = save_expr (valist);
4949 /* For this case, the backends will be expecting a pointer to
4950 vatype, but it's possible we've actually been given an array
4951 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4952 So fix it. */
4953 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4955 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4956 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4959 else
4961 tree pt = build_pointer_type (vatype);
4963 if (! needs_lvalue)
4965 if (! TREE_SIDE_EFFECTS (valist))
4966 return valist;
4968 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4969 TREE_SIDE_EFFECTS (valist) = 1;
4972 if (TREE_SIDE_EFFECTS (valist))
4973 valist = save_expr (valist);
4974 valist = fold_build2_loc (loc, MEM_REF,
4975 vatype, valist, build_int_cst (pt, 0));
4978 return valist;
4981 /* The "standard" definition of va_list is void*. */
4983 tree
4984 std_build_builtin_va_list (void)
4986 return ptr_type_node;
4989 /* The "standard" abi va_list is va_list_type_node. */
4991 tree
4992 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4994 return va_list_type_node;
4997 /* The "standard" type of va_list is va_list_type_node. */
4999 tree
5000 std_canonical_va_list_type (tree type)
5002 tree wtype, htype;
5004 wtype = va_list_type_node;
5005 htype = type;
5007 if (TREE_CODE (wtype) == ARRAY_TYPE)
5009 /* If va_list is an array type, the argument may have decayed
5010 to a pointer type, e.g. by being passed to another function.
5011 In that case, unwrap both types so that we can compare the
5012 underlying records. */
5013 if (TREE_CODE (htype) == ARRAY_TYPE
5014 || POINTER_TYPE_P (htype))
5016 wtype = TREE_TYPE (wtype);
5017 htype = TREE_TYPE (htype);
5020 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5021 return va_list_type_node;
5023 return NULL_TREE;
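/* Editorial example: on targets where va_list is an array type, such
   as x86-64's __va_list_tag[1], a va_list object passed to another
   function decays to __va_list_tag *; the unwrapping above strips one
   level from both WTYPE and HTYPE so the underlying record types
   still compare equal.  */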
5026 /* The "standard" implementation of va_start: just assign `nextarg' to
5027 the variable. */
5029 void
5030 std_expand_builtin_va_start (tree valist, rtx nextarg)
5032 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5033 convert_move (va_r, nextarg, 0);
5036 /* Expand EXP, a call to __builtin_va_start. */
5038 static rtx
5039 expand_builtin_va_start (tree exp)
5041 rtx nextarg;
5042 tree valist;
5043 location_t loc = EXPR_LOCATION (exp);
5045 if (call_expr_nargs (exp) < 2)
5047 error_at (loc, "too few arguments to function %<va_start%>");
5048 return const0_rtx;
5051 if (fold_builtin_next_arg (exp, true))
5052 return const0_rtx;
5054 nextarg = expand_builtin_next_arg ();
5055 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5057 if (targetm.expand_builtin_va_start)
5058 targetm.expand_builtin_va_start (valist, nextarg);
5059 else
5060 std_expand_builtin_va_start (valist, nextarg);
5062 return const0_rtx;
5065 /* Expand EXP, a call to __builtin_va_end. */
5067 static rtx
5068 expand_builtin_va_end (tree exp)
5070 tree valist = CALL_EXPR_ARG (exp, 0);
5072 /* Evaluate for side effects, if needed. I hate macros that don't
5073 do that. */
5074 if (TREE_SIDE_EFFECTS (valist))
5075 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5077 return const0_rtx;
5080 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5081 builtin rather than just as an assignment in stdarg.h because of the
5082 nastiness of array-type va_list types. */
5084 static rtx
5085 expand_builtin_va_copy (tree exp)
5087 tree dst, src, t;
5088 location_t loc = EXPR_LOCATION (exp);
5090 dst = CALL_EXPR_ARG (exp, 0);
5091 src = CALL_EXPR_ARG (exp, 1);
5093 dst = stabilize_va_list_loc (loc, dst, 1);
5094 src = stabilize_va_list_loc (loc, src, 0);
5096 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5098 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5100 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5101 TREE_SIDE_EFFECTS (t) = 1;
5102 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5104 else
5106 rtx dstb, srcb, size;
5108 /* Evaluate to pointers. */
5109 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5110 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5111 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5112 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5114 dstb = convert_memory_address (Pmode, dstb);
5115 srcb = convert_memory_address (Pmode, srcb);
5117 /* "Dereference" to BLKmode memories. */
5118 dstb = gen_rtx_MEM (BLKmode, dstb);
5119 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5120 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5121 srcb = gen_rtx_MEM (BLKmode, srcb);
5122 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5123 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5125 /* Copy. */
5126 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5129 return const0_rtx;
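/* Editorial note: the two branches above mirror the two va_list
   shapes: for a scalar va_list, va_copy (d, s) is a plain assignment;
   for an array-type va_list the object itself holds the state, so the
   copy must be a block move of TYPE_SIZE_UNIT bytes of the ABI
   va_list type.  */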
5132 /* Expand a call to one of the builtin functions __builtin_frame_address or
5133 __builtin_return_address. */
5135 static rtx
5136 expand_builtin_frame_address (tree fndecl, tree exp)
5138 /* The argument must be a nonnegative integer constant.
5139 It counts the number of frames to scan up the stack.
5140 The value is either the frame pointer value or the return
5141 address saved in that frame. */
5142 if (call_expr_nargs (exp) == 0)
5143 /* Warning about missing arg was already issued. */
5144 return const0_rtx;
5145 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5147 error ("invalid argument to %qD", fndecl);
5148 return const0_rtx;
5150 else
5152 /* Number of frames to scan up the stack. */
5153 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5155 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5157 /* Some ports cannot access arbitrary stack frames. */
5158 if (tem == NULL)
5160 warning (0, "unsupported argument to %qD", fndecl);
5161 return const0_rtx;
5164 if (count)
5166 /* Warn since no effort is made to ensure that any frame
5167 beyond the current one exists or can be safely reached. */
5168 warning (OPT_Wframe_address, "calling %qD with "
5169 "a nonzero argument is unsafe", fndecl);
5172 /* For __builtin_frame_address, return what we've got. */
5173 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5174 return tem;
5176 if (!REG_P (tem)
5177 && ! CONSTANT_P (tem))
5178 tem = copy_addr_to_reg (tem);
5179 return tem;
5183 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5184 failed and the caller should emit a normal call. */
5186 static rtx
5187 expand_builtin_alloca (tree exp)
5189 rtx op0;
5190 rtx result;
5191 unsigned int align;
5192 tree fndecl = get_callee_fndecl (exp);
5193 HOST_WIDE_INT max_size;
5194 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5195 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5196 bool valid_arglist
5197 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5198 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5199 VOID_TYPE)
5200 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5201 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5202 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5204 if (!valid_arglist)
5205 return NULL_RTX;
5207 if ((alloca_for_var
5208 && warn_vla_limit >= HOST_WIDE_INT_MAX
5209 && warn_alloc_size_limit < warn_vla_limit)
5210 || (!alloca_for_var
5211 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5212 && warn_alloc_size_limit < warn_alloca_limit
5215 /* -Walloca-larger-than and -Wvla-larger-than settings of
5216 less than HOST_WIDE_INT_MAX override the more general
5217 -Walloc-size-larger-than so unless either of the former
5218 options is smaller than the last one (which would imply
5219 that the call was already checked), check the alloca
5220 arguments for overflow. */
5221 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5222 int idx[] = { 0, -1 };
5223 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5226 /* Compute the argument. */
5227 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5229 /* Compute the alignment. */
5230 align = (fcode == BUILT_IN_ALLOCA
5231 ? BIGGEST_ALIGNMENT
5232 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5234 /* Compute the maximum size. */
5235 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5236 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5237 : -1);
5239 /* Allocate the desired space. If the allocation stems from the declaration
5240 of a variable-sized object, it cannot accumulate. */
5241 result
5242 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5243 result = convert_memory_address (ptr_mode, result);
5245 return result;
5248 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5249 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5250 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5251 handle_builtin_stack_restore function. */
5253 static rtx
5254 expand_asan_emit_allocas_unpoison (tree exp)
5256 tree arg0 = CALL_EXPR_ARG (exp, 0);
5257 tree arg1 = CALL_EXPR_ARG (exp, 1);
5258 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5259 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5260 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5261 stack_pointer_rtx, NULL_RTX, 0,
5262 OPTAB_LIB_WIDEN);
5263 off = convert_modes (ptr_mode, Pmode, off, 0);
5264 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5265 OPTAB_LIB_WIDEN);
5266 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5267 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5268 top, ptr_mode, bot, ptr_mode);
5269 return ret;
5272 /* Expand a call to bswap builtin in EXP.
5273 Return NULL_RTX if a normal call should be emitted rather than expanding the
5274 function in-line. If convenient, the result should be placed in TARGET.
5275 SUBTARGET may be used as the target for computing one of EXP's operands. */
5277 static rtx
5278 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5279 rtx subtarget)
5281 tree arg;
5282 rtx op0;
5284 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5285 return NULL_RTX;
5287 arg = CALL_EXPR_ARG (exp, 0);
5288 op0 = expand_expr (arg,
5289 subtarget && GET_MODE (subtarget) == target_mode
5290 ? subtarget : NULL_RTX,
5291 target_mode, EXPAND_NORMAL);
5292 if (GET_MODE (op0) != target_mode)
5293 op0 = convert_to_mode (target_mode, op0, 1);
5295 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5297 gcc_assert (target);
5299 return convert_to_mode (target_mode, target, 1);
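/* For example (illustrative): __builtin_bswap32 (0x12345678) goes
   through bswap_optab above, typically a single byte-swap insn when
   the target has one, and yields 0x78563412.  */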
5302 /* Expand a call to a unary builtin in EXP.
5303 Return NULL_RTX if a normal call should be emitted rather than expanding the
5304 function in-line. If convenient, the result should be placed in TARGET.
5305 SUBTARGET may be used as the target for computing one of EXP's operands. */
5307 static rtx
5308 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5309 rtx subtarget, optab op_optab)
5311 rtx op0;
5313 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5314 return NULL_RTX;
5316 /* Compute the argument. */
5317 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5318 (subtarget
5319 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5320 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5321 VOIDmode, EXPAND_NORMAL);
5322 /* Compute op, into TARGET if possible.
5323 Set TARGET to wherever the result comes back. */
5324 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5325 op_optab, op0, target, op_optab != clrsb_optab);
5326 gcc_assert (target);
5328 return convert_to_mode (target_mode, target, 0);
5331 /* Expand a call to __builtin_expect. We just return our argument
5332 as the builtin_expect semantics should already have been handled by
5333 the tree branch prediction pass. */
5335 static rtx
5336 expand_builtin_expect (tree exp, rtx target)
5338 tree arg;
5340 if (call_expr_nargs (exp) < 2)
5341 return const0_rtx;
5342 arg = CALL_EXPR_ARG (exp, 0);
5344 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5345 /* When guessing was done, the hints should be already stripped away. */
5346 gcc_assert (!flag_guess_branch_prob
5347 || optimize == 0 || seen_error ());
5348 return target;
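/* E.g. a source-level "if (__builtin_expect (x, 0))" reaches this
   point as a plain test of X; the branch-probability hint was already
   consumed by the tree-level predictors.  */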
5351 /* Expand a call to __builtin_expect_with_probability. We just return our
5352 argument as the builtin_expect_with_probability semantics should
5353 already have been handled by the tree branch prediction pass. */
5355 static rtx
5356 expand_builtin_expect_with_probability (tree exp, rtx target)
5358 tree arg;
5360 if (call_expr_nargs (exp) < 3)
5361 return const0_rtx;
5362 arg = CALL_EXPR_ARG (exp, 0);
5364 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5365 /* When guessing was done, the hints should be already stripped away. */
5366 gcc_assert (!flag_guess_branch_prob
5367 || optimize == 0 || seen_error ());
5368 return target;
5372 /* Expand a call to __builtin_assume_aligned. We just return our first
5373 argument, as the builtin_assume_aligned semantics should already
5374 have been handled by CCP. */
5376 static rtx
5377 expand_builtin_assume_aligned (tree exp, rtx target)
5379 if (call_expr_nargs (exp) < 2)
5380 return const0_rtx;
5381 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5382 EXPAND_NORMAL);
5383 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5384 && (call_expr_nargs (exp) < 3
5385 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5386 return target;
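/* E.g. "q = __builtin_assume_aligned (p, 16)" is normally folded by
   CCP long before expansion; if it does reach this point, we simply
   return the pointer argument unchanged after checking that the
   alignment operands have no side effects.  */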
5389 void
5390 expand_builtin_trap (void)
5392 if (targetm.have_trap ())
5394 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5395 /* For trap insns when not accumulating outgoing args force
5396 REG_ARGS_SIZE note to prevent crossjumping of calls with
5397 different args sizes. */
5398 if (!ACCUMULATE_OUTGOING_ARGS)
5399 add_args_size_note (insn, stack_pointer_delta);
5401 else
5403 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5404 tree call_expr = build_call_expr (fn, 0);
5405 expand_call (call_expr, NULL_RTX, false);
5408 emit_barrier ();
5411 /* Expand a call to __builtin_unreachable. We do nothing except emit
5412 a barrier saying that control flow will not pass here.
5414 It is the responsibility of the program being compiled to ensure
5415 that control flow never reaches __builtin_unreachable. */
5416 static void
5417 expand_builtin_unreachable (void)
5419 emit_barrier ();
5422 /* Expand EXP, a call to fabs, fabsf or fabsl.
5423 Return NULL_RTX if a normal call should be emitted rather than expanding
5424 the function inline. If convenient, the result should be placed
5425 in TARGET. SUBTARGET may be used as the target for computing
5426 the operand. */
5428 static rtx
5429 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5431 machine_mode mode;
5432 tree arg;
5433 rtx op0;
5435 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5436 return NULL_RTX;
5438 arg = CALL_EXPR_ARG (exp, 0);
5439 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5440 mode = TYPE_MODE (TREE_TYPE (arg));
5441 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5442 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5445 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5446 Return NULL if a normal call should be emitted rather than expanding the
5447 function inline. If convenient, the result should be placed in TARGET.
5448 SUBTARGET may be used as the target for computing the operand. */
5450 static rtx
5451 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5453 rtx op0, op1;
5454 tree arg;
5456 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5457 return NULL_RTX;
5459 arg = CALL_EXPR_ARG (exp, 0);
5460 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5462 arg = CALL_EXPR_ARG (exp, 1);
5463 op1 = expand_normal (arg);
5465 return expand_copysign (op0, op1, target);
5468 /* Expand a call to __builtin___clear_cache. */
5470 static rtx
5471 expand_builtin___clear_cache (tree exp)
5473 if (!targetm.code_for_clear_cache)
5475 #ifdef CLEAR_INSN_CACHE
5476 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5477 does something. Just do the default expansion to a call to
5478 __clear_cache(). */
5479 return NULL_RTX;
5480 #else
5481 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5482 does nothing. There is no need to call it. Do nothing. */
5483 return const0_rtx;
5484 #endif /* CLEAR_INSN_CACHE */
5487 /* We have a "clear_cache" insn, and it will handle everything. */
5488 tree begin, end;
5489 rtx begin_rtx, end_rtx;
5491 /* We must not expand to a library call. If we did, any
5492 fallback library function in libgcc that might contain a call to
5493 __builtin___clear_cache() would recurse infinitely. */
5494 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5496 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5497 return const0_rtx;
5500 if (targetm.have_clear_cache ())
5502 struct expand_operand ops[2];
5504 begin = CALL_EXPR_ARG (exp, 0);
5505 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5507 end = CALL_EXPR_ARG (exp, 1);
5508 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5510 create_address_operand (&ops[0], begin_rtx);
5511 create_address_operand (&ops[1], end_rtx);
5512 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5513 return const0_rtx;
5515 return const0_rtx;
5518 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5520 static rtx
5521 round_trampoline_addr (rtx tramp)
5523 rtx temp, addend, mask;
5525 /* If we don't need too much alignment, we'll have been guaranteed
5526 proper alignment by get_trampoline_type. */
5527 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5528 return tramp;
5530 /* Round address up to desired boundary. */
5531 temp = gen_reg_rtx (Pmode);
5532 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5533 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5535 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5536 temp, 0, OPTAB_LIB_WIDEN);
5537 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5538 temp, 0, OPTAB_LIB_WIDEN);
5540 return tramp;
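/* Worked example (assuming TRAMPOLINE_ALIGNMENT of 64 bits, and that
   rounding is needed at all): the two binops above compute
   (tramp + 7) & -8, rounding the address up to the next 8-byte
   boundary.  */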
5543 static rtx
5544 expand_builtin_init_trampoline (tree exp, bool onstack)
5546 tree t_tramp, t_func, t_chain;
5547 rtx m_tramp, r_tramp, r_chain, tmp;
5549 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5550 POINTER_TYPE, VOID_TYPE))
5551 return NULL_RTX;
5553 t_tramp = CALL_EXPR_ARG (exp, 0);
5554 t_func = CALL_EXPR_ARG (exp, 1);
5555 t_chain = CALL_EXPR_ARG (exp, 2);
5557 r_tramp = expand_normal (t_tramp);
5558 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5559 MEM_NOTRAP_P (m_tramp) = 1;
5561 /* If ONSTACK, the TRAMP argument should be the address of a field
5562 within the local function's FRAME decl. Either way, let's see if
5563 we can fill in the MEM_ATTRs for this memory. */
5564 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5565 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5567 /* Creator of a heap trampoline is responsible for making sure the
5568 address is aligned to at least STACK_BOUNDARY. Normally malloc
5569 will ensure this anyhow. */
5570 tmp = round_trampoline_addr (r_tramp);
5571 if (tmp != r_tramp)
5573 m_tramp = change_address (m_tramp, BLKmode, tmp);
5574 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5575 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5578 /* The FUNC argument should be the address of the nested function.
5579 Extract the actual function decl to pass to the hook. */
5580 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5581 t_func = TREE_OPERAND (t_func, 0);
5582 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5584 r_chain = expand_normal (t_chain);
5586 /* Generate insns to initialize the trampoline. */
5587 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5589 if (onstack)
5591 trampolines_created = 1;
5593 if (targetm.calls.custom_function_descriptors != 0)
5594 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5595 "trampoline generated for nested function %qD", t_func);
5598 return const0_rtx;
5601 static rtx
5602 expand_builtin_adjust_trampoline (tree exp)
5604 rtx tramp;
5606 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5607 return NULL_RTX;
5609 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5610 tramp = round_trampoline_addr (tramp);
5611 if (targetm.calls.trampoline_adjust_address)
5612 tramp = targetm.calls.trampoline_adjust_address (tramp);
5614 return tramp;
5617 /* Expand a call to the builtin descriptor initialization routine.
5618 A descriptor is made up of a pair of pointers: the static
5619 chain value and the code entry point, in that order. */
5621 static rtx
5622 expand_builtin_init_descriptor (tree exp)
5624 tree t_descr, t_func, t_chain;
5625 rtx m_descr, r_descr, r_func, r_chain;
5627 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5628 VOID_TYPE))
5629 return NULL_RTX;
5631 t_descr = CALL_EXPR_ARG (exp, 0);
5632 t_func = CALL_EXPR_ARG (exp, 1);
5633 t_chain = CALL_EXPR_ARG (exp, 2);
5635 r_descr = expand_normal (t_descr);
5636 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5637 MEM_NOTRAP_P (m_descr) = 1;
5639 r_func = expand_normal (t_func);
5640 r_chain = expand_normal (t_chain);
5642 /* Generate insns to initialize the descriptor. */
5643 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5644 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5645 POINTER_SIZE / BITS_PER_UNIT), r_func);
5647 return const0_rtx;
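/* The resulting descriptor is two consecutive pointer-sized slots,
   e.g. on a target with 64-bit pointers (an assumption for
   illustration):

     offset 0: static chain value
     offset 8: code entry point (POINTER_SIZE / BITS_PER_UNIT == 8)  */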
5650 /* Expand a call to the builtin descriptor adjustment routine. */
5652 static rtx
5653 expand_builtin_adjust_descriptor (tree exp)
5655 rtx tramp;
5657 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5658 return NULL_RTX;
5660 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5662 /* Unalign the descriptor to allow runtime identification. */
5663 tramp = plus_constant (ptr_mode, tramp,
5664 targetm.calls.custom_function_descriptors);
5666 return force_operand (tramp, NULL_RTX);
5669 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5670 function. The function first checks whether the back end provides
5671 an insn to implement signbit for the respective mode. If not, it
5672 checks whether the floating point format of the value is such that
5673 the sign bit can be extracted. If that is not the case, error out.
5674 EXP is the expression that is a call to the builtin function; if
5675 convenient, the result should be placed in TARGET. */
5676 static rtx
5677 expand_builtin_signbit (tree exp, rtx target)
5679 const struct real_format *fmt;
5680 scalar_float_mode fmode;
5681 scalar_int_mode rmode, imode;
5682 tree arg;
5683 int word, bitpos;
5684 enum insn_code icode;
5685 rtx temp;
5686 location_t loc = EXPR_LOCATION (exp);
5688 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5689 return NULL_RTX;
5691 arg = CALL_EXPR_ARG (exp, 0);
5692 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5693 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5694 fmt = REAL_MODE_FORMAT (fmode);
5696 arg = builtin_save_expr (arg);
5698 /* Expand the argument yielding a RTX expression. */
5699 temp = expand_normal (arg);
5701 /* Check if the back end provides an insn that handles signbit for the
5702 argument's mode. */
5703 icode = optab_handler (signbit_optab, fmode);
5704 if (icode != CODE_FOR_nothing)
5706 rtx_insn *last = get_last_insn ();
5707 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5708 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5709 return target;
5710 delete_insns_since (last);
5713 /* For floating point formats without a sign bit, implement signbit
5714 as "ARG < 0.0". */
5715 bitpos = fmt->signbit_ro;
5716 if (bitpos < 0)
5718 /* But we can't do this if the format supports signed zero. */
5719 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5721 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5722 build_real (TREE_TYPE (arg), dconst0));
5723 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5726 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5728 imode = int_mode_for_mode (fmode).require ();
5729 temp = gen_lowpart (imode, temp);
5731 else
5733 imode = word_mode;
5734 /* Handle targets with different FP word orders. */
5735 if (FLOAT_WORDS_BIG_ENDIAN)
5736 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5737 else
5738 word = bitpos / BITS_PER_WORD;
5739 temp = operand_subword_force (temp, word, fmode);
5740 bitpos = bitpos % BITS_PER_WORD;
5743 /* Force the intermediate word_mode (or narrower) result into a
5744 register. This avoids attempting to create paradoxical SUBREGs
5745 of floating point modes below. */
5746 temp = force_reg (imode, temp);
5748 /* If the bitpos is within the "result mode" lowpart, the operation
5749 can be implemented with a single bitwise AND. Otherwise, we need
5750 a right shift and an AND. */
5752 if (bitpos < GET_MODE_BITSIZE (rmode))
5754 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5756 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5757 temp = gen_lowpart (rmode, temp);
5758 temp = expand_binop (rmode, and_optab, temp,
5759 immed_wide_int_const (mask, rmode),
5760 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5762 else
5764 /* Perform a logical right shift to place the signbit in the least
5765 significant bit, then truncate the result to the desired mode
5766 and mask just this bit. */
5767 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5768 temp = gen_lowpart (rmode, temp);
5769 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5770 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5773 return temp;
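/* Worked example (assuming IEEE single precision and a 32-bit result
   mode): fmt->signbit_ro is 31, which lies within the result mode's
   lowpart, so the whole expansion above is a single AND with the
   mask 0x80000000.  */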
5776 /* Expand fork or exec calls. TARGET is the desired target of the
5777 call. EXP is the call. FN is the
5778 identifier of the actual function. IGNORE is nonzero if the
5779 value is to be ignored. */
5781 static rtx
5782 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5784 tree id, decl;
5785 tree call;
5787 /* If we are not profiling, just call the function. */
5788 if (!profile_arc_flag)
5789 return NULL_RTX;
5791 /* Otherwise call the wrapper. This should be equivalent for the rest of
5792 the compiler, so the code does not diverge, and the wrapper may run the
5793 code necessary for keeping the profiling sane. */
5795 switch (DECL_FUNCTION_CODE (fn))
5797 case BUILT_IN_FORK:
5798 id = get_identifier ("__gcov_fork");
5799 break;
5801 case BUILT_IN_EXECL:
5802 id = get_identifier ("__gcov_execl");
5803 break;
5805 case BUILT_IN_EXECV:
5806 id = get_identifier ("__gcov_execv");
5807 break;
5809 case BUILT_IN_EXECLP:
5810 id = get_identifier ("__gcov_execlp");
5811 break;
5813 case BUILT_IN_EXECLE:
5814 id = get_identifier ("__gcov_execle");
5815 break;
5817 case BUILT_IN_EXECVP:
5818 id = get_identifier ("__gcov_execvp");
5819 break;
5821 case BUILT_IN_EXECVE:
5822 id = get_identifier ("__gcov_execve");
5823 break;
5825 default:
5826 gcc_unreachable ();
5829 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5830 FUNCTION_DECL, id, TREE_TYPE (fn));
5831 DECL_EXTERNAL (decl) = 1;
5832 TREE_PUBLIC (decl) = 1;
5833 DECL_ARTIFICIAL (decl) = 1;
5834 TREE_NOTHROW (decl) = 1;
5835 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5836 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5837 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5838 return expand_call (call, target, ignore);
5843 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5844 the pointer in these functions is void*, the tree optimizers may remove
5845 casts. The mode computed in expand_builtin isn't reliable either, due
5846 to __sync_bool_compare_and_swap.
5848 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5849 group of builtins. This gives us log2 of the mode size. */
5851 static inline machine_mode
5852 get_builtin_sync_mode (int fcode_diff)
5854 /* The size is not negotiable, so ask not to get BLKmode in return
5855 if the target indicates that a smaller size would be better. */
5856 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
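/* E.g. for a __sync_..._4 builtin, FCODE_DIFF is 2, so this returns
   the integer mode of 8 << 2 == 32 bits (SImode on typical targets
   where BITS_PER_UNIT is 8).  */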
5859 /* Expand the memory expression LOC and return the appropriate memory operand
5860 for the builtin_sync operations. */
5862 static rtx
5863 get_builtin_sync_mem (tree loc, machine_mode mode)
5865 rtx addr, mem;
5867 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5868 addr = convert_memory_address (Pmode, addr);
5870 /* Note that we explicitly do not want any alias information for this
5871 memory, so that we kill all other live memories. Otherwise we don't
5872 satisfy the full barrier semantics of the intrinsic. */
5873 mem = validize_mem (gen_rtx_MEM (mode, addr));
5875 /* The memory must be at least as aligned as the mode requires. */
5876 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5877 get_pointer_alignment (loc)));
5878 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5879 MEM_VOLATILE_P (mem) = 1;
5881 return mem;
5884 /* Make sure an argument is in the right mode.
5885 EXP is the tree argument.
5886 MODE is the mode it should be in. */
5888 static rtx
5889 expand_expr_force_mode (tree exp, machine_mode mode)
5891 rtx val;
5892 machine_mode old_mode;
5894 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5895 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5896 of CONST_INTs, where we know the old_mode only from the call argument. */
5898 old_mode = GET_MODE (val);
5899 if (old_mode == VOIDmode)
5900 old_mode = TYPE_MODE (TREE_TYPE (exp));
5901 val = convert_modes (mode, old_mode, val, 1);
5902 return val;
5906 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5907 EXP is the CALL_EXPR. CODE is the rtx code
5908 that corresponds to the arithmetic or logical operation from the name;
5909 an exception here is that NOT actually means NAND. TARGET is an optional
5910 place for us to store the results; AFTER is true if this is the
5911 xxx_and_fetch form, i.e. we return the value after the operation. */
5913 static rtx
5914 expand_builtin_sync_operation (machine_mode mode, tree exp,
5915 enum rtx_code code, bool after,
5916 rtx target)
5918 rtx val, mem;
5919 location_t loc = EXPR_LOCATION (exp);
5921 if (code == NOT && warn_sync_nand)
5923 tree fndecl = get_callee_fndecl (exp);
5924 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5926 static bool warned_f_a_n, warned_n_a_f;
5928 switch (fcode)
5930 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5931 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5932 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5933 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5934 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5935 if (warned_f_a_n)
5936 break;
5938 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5939 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5940 warned_f_a_n = true;
5941 break;
5943 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5944 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5945 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5946 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5947 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5948 if (warned_n_a_f)
5949 break;
5951 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5952 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5953 warned_n_a_f = true;
5954 break;
5956 default:
5957 gcc_unreachable ();
5961 /* Expand the operands. */
5962 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5963 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5965 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5966 after);
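/* E.g. __sync_fetch_and_add_4 maps to CODE == PLUS with AFTER false
   (old value returned), while __sync_add_and_fetch_4 uses AFTER true;
   both expand through expand_atomic_fetch_op with SYNC_SEQ_CST
   semantics as above.  */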
5969 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5970 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5971 true if this is the boolean form. TARGET is a place for us to store the
5972 results; this is NOT optional if IS_BOOL is true. */
5974 static rtx
5975 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5976 bool is_bool, rtx target)
5978 rtx old_val, new_val, mem;
5979 rtx *pbool, *poval;
5981 /* Expand the operands. */
5982 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5983 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5984 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5986 pbool = poval = NULL;
5987 if (target != const0_rtx)
5989 if (is_bool)
5990 pbool = &target;
5991 else
5992 poval = &target;
5994 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5995 false, MEMMODEL_SYNC_SEQ_CST,
5996 MEMMODEL_SYNC_SEQ_CST))
5997 return NULL_RTX;
5999 return target;
6002 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6003 general form is actually an atomic exchange, and some targets only
6004 support a reduced form with the second argument being a constant 1.
6005 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6006 the results. */
6008 static rtx
6009 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6010 rtx target)
6012 rtx val, mem;
6014 /* Expand the operands. */
6015 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6016 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6018 return expand_sync_lock_test_and_set (target, mem, val);
6021 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6023 static void
6024 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6026 rtx mem;
6028 /* Expand the operands. */
6029 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6031 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6034 /* Given an integer representing an ``enum memmodel'', verify its
6035 correctness and return the memory model enum. */
6037 static enum memmodel
6038 get_memmodel (tree exp)
6040 rtx op;
6041 unsigned HOST_WIDE_INT val;
6042 source_location loc
6043 = expansion_point_location_if_in_system_header (input_location);
6045 /* If the parameter is not a constant, it's a run time value so we'll just
6046 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6047 if (TREE_CODE (exp) != INTEGER_CST)
6048 return MEMMODEL_SEQ_CST;
6050 op = expand_normal (exp);
6052 val = INTVAL (op);
6053 if (targetm.memmodel_check)
6054 val = targetm.memmodel_check (val);
6055 else if (val & ~MEMMODEL_MASK)
6057 warning_at (loc, OPT_Winvalid_memory_model,
6058 "unknown architecture specifier in memory model to builtin");
6059 return MEMMODEL_SEQ_CST;
6062 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
6063 if (memmodel_base (val) >= MEMMODEL_LAST)
6065 warning_at (loc, OPT_Winvalid_memory_model,
6066 "invalid memory model argument to builtin");
6067 return MEMMODEL_SEQ_CST;
6070 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6071 be conservative and promote consume to acquire. */
6072 if (val == MEMMODEL_CONSUME)
6073 val = MEMMODEL_ACQUIRE;
6075 return (enum memmodel) val;
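/* E.g. __atomic_load_n (p, __ATOMIC_CONSUME) arrives here with a
   constant memory-model argument; per the PR59448 workaround above it
   is promoted and expanded as if MEMMODEL_ACQUIRE had been given.  */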
6078 /* Expand the __atomic_exchange intrinsic:
6079 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6080 EXP is the CALL_EXPR.
6081 TARGET is an optional place for us to store the results. */
6083 static rtx
6084 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6086 rtx val, mem;
6087 enum memmodel model;
6089 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6091 if (!flag_inline_atomics)
6092 return NULL_RTX;
6094 /* Expand the operands. */
6095 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6096 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6098 return expand_atomic_exchange (target, mem, val, model);
6101 /* Expand the __atomic_compare_exchange intrinsic:
6102 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6103 TYPE desired, BOOL weak,
6104 enum memmodel success,
6105 enum memmodel failure)
6106 EXP is the CALL_EXPR.
6107 TARGET is an optional place for us to store the results. */
6109 static rtx
6110 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6111 rtx target)
6113 rtx expect, desired, mem, oldval;
6114 rtx_code_label *label;
6115 enum memmodel success, failure;
6116 tree weak;
6117 bool is_weak;
6118 source_location loc
6119 = expansion_point_location_if_in_system_header (input_location);
6121 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6122 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6124 if (failure > success)
6126 warning_at (loc, OPT_Winvalid_memory_model,
6127 "failure memory model cannot be stronger than success "
6128 "memory model for %<__atomic_compare_exchange%>");
6129 success = MEMMODEL_SEQ_CST;
6132 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6134 warning_at (loc, OPT_Winvalid_memory_model,
6135 "invalid failure memory model for "
6136 "%<__atomic_compare_exchange%>");
6137 failure = MEMMODEL_SEQ_CST;
6138 success = MEMMODEL_SEQ_CST;
6142 if (!flag_inline_atomics)
6143 return NULL_RTX;
6145 /* Expand the operands. */
6146 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6148 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6149 expect = convert_memory_address (Pmode, expect);
6150 expect = gen_rtx_MEM (mode, expect);
6151 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6153 weak = CALL_EXPR_ARG (exp, 3);
6154 is_weak = false;
6155 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6156 is_weak = true;
6158 if (target == const0_rtx)
6159 target = NULL;
6161 /* Lest the rtl backend create a race condition with an improper store
6162 to memory, always create a new pseudo for OLDVAL. */
6163 oldval = NULL;
6165 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6166 is_weak, success, failure))
6167 return NULL_RTX;
6169 /* Conditionally store back to EXPECT, lest we create a race condition
6170 with an improper store to memory. */
6171 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6172 the normal case where EXPECT is totally private, i.e. a register. At
6173 which point the store can be unconditional. */
6174 label = gen_label_rtx ();
6175 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6176 GET_MODE (target), 1, label);
6177 emit_move_insn (expect, oldval);
6178 emit_label (label);
6180 return target;
6183 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6184 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6185 call. The weak parameter must be dropped to match the expected parameter
6186 list and the expected argument changed from value to pointer to memory
6187 slot. */
6189 static void
6190 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6192 unsigned int z;
6193 vec<tree, va_gc> *vec;
6195 vec_alloc (vec, 5);
6196 vec->quick_push (gimple_call_arg (call, 0));
6197 tree expected = gimple_call_arg (call, 1);
6198 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6199 TREE_TYPE (expected));
6200 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6201 if (expd != x)
6202 emit_move_insn (x, expd);
6203 tree v = make_tree (TREE_TYPE (expected), x);
6204 vec->quick_push (build1 (ADDR_EXPR,
6205 build_pointer_type (TREE_TYPE (expected)), v));
6206 vec->quick_push (gimple_call_arg (call, 2));
6207 /* Skip the boolean weak parameter. */
6208 for (z = 4; z < 6; z++)
6209 vec->quick_push (gimple_call_arg (call, z));
6210 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6211 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6212 gcc_assert (bytes_log2 < 5);
6213 built_in_function fncode
6214 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6215 + bytes_log2);
6216 tree fndecl = builtin_decl_explicit (fncode);
6217 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6218 fndecl);
6219 tree exp = build_call_vec (boolean_type_node, fn, vec);
6220 tree lhs = gimple_call_lhs (call);
6221 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6222 if (lhs)
6224 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6225 if (GET_MODE (boolret) != mode)
6226 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6227 x = force_reg (mode, x);
6228 write_complex_part (target, boolret, true);
6229 write_complex_part (target, x, false);
6233 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6235 void
6236 expand_ifn_atomic_compare_exchange (gcall *call)
6238 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6239 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6240 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6241 rtx expect, desired, mem, oldval, boolret;
6242 enum memmodel success, failure;
6243 tree lhs;
6244 bool is_weak;
6245 source_location loc
6246 = expansion_point_location_if_in_system_header (gimple_location (call));
6248 success = get_memmodel (gimple_call_arg (call, 4));
6249 failure = get_memmodel (gimple_call_arg (call, 5));
6251 if (failure > success)
6253 warning_at (loc, OPT_Winvalid_memory_model,
6254 "failure memory model cannot be stronger than success "
6255 "memory model for %<__atomic_compare_exchange%>");
6256 success = MEMMODEL_SEQ_CST;
6259 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6261 warning_at (loc, OPT_Winvalid_memory_model,
6262 "invalid failure memory model for "
6263 "%<__atomic_compare_exchange%>");
6264 failure = MEMMODEL_SEQ_CST;
6265 success = MEMMODEL_SEQ_CST;
6268 if (!flag_inline_atomics)
6270 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6271 return;
6274 /* Expand the operands. */
6275 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6277 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6278 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6280 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6282 boolret = NULL;
6283 oldval = NULL;
6285 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6286 is_weak, success, failure))
6288 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6289 return;
6292 lhs = gimple_call_lhs (call);
6293 if (lhs)
6295 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6296 if (GET_MODE (boolret) != mode)
6297 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6298 write_complex_part (target, boolret, true);
6299 write_complex_part (target, oldval, false);
6303 /* Expand the __atomic_load intrinsic:
6304 TYPE __atomic_load (TYPE *object, enum memmodel)
6305 EXP is the CALL_EXPR.
6306 TARGET is an optional place for us to store the results. */
6308 static rtx
6309 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6311 rtx mem;
6312 enum memmodel model;
6314 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6315 if (is_mm_release (model) || is_mm_acq_rel (model))
6317 source_location loc
6318 = expansion_point_location_if_in_system_header (input_location);
6319 warning_at (loc, OPT_Winvalid_memory_model,
6320 "invalid memory model for %<__atomic_load%>");
6321 model = MEMMODEL_SEQ_CST;
6324 if (!flag_inline_atomics)
6325 return NULL_RTX;
6327 /* Expand the operand. */
6328 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6330 return expand_atomic_load (target, mem, model);
6334 /* Expand the __atomic_store intrinsic:
6335 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6336 EXP is the CALL_EXPR.
6337 TARGET is an optional place for us to store the results. */
6339 static rtx
6340 expand_builtin_atomic_store (machine_mode mode, tree exp)
6342 rtx mem, val;
6343 enum memmodel model;
6345 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6346 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6347 || is_mm_release (model)))
6349 source_location loc
6350 = expansion_point_location_if_in_system_header (input_location);
6351 warning_at (loc, OPT_Winvalid_memory_model,
6352 "invalid memory model for %<__atomic_store%>");
6353 model = MEMMODEL_SEQ_CST;
6356 if (!flag_inline_atomics)
6357 return NULL_RTX;
6359 /* Expand the operands. */
6360 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6361 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6363 return expand_atomic_store (mem, val, model, false);
6366 /* Expand the __atomic_fetch_XXX intrinsic:
6367 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6368 EXP is the CALL_EXPR.
6369 TARGET is an optional place for us to store the results.
6370 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6371 FETCH_AFTER is true if returning the result of the operation.
6372 FETCH_AFTER is false if returning the value before the operation.
6373 IGNORE is true if the result is not used.
6374 EXT_CALL is the correct builtin for an external call if this cannot be
6375 resolved to an instruction sequence. */
6377 static rtx
6378 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6379 enum rtx_code code, bool fetch_after,
6380 bool ignore, enum built_in_function ext_call)
6382 rtx val, mem, ret;
6383 enum memmodel model;
6384 tree fndecl;
6385 tree addr;
6387 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6389 /* Expand the operands. */
6390 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6391 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6393 /* Only try generating instructions if inlining is turned on. */
6394 if (flag_inline_atomics)
6396 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6397 if (ret)
6398 return ret;
6401 /* Return if a different routine isn't needed for the library call. */
6402 if (ext_call == BUILT_IN_NONE)
6403 return NULL_RTX;
6405 /* Change the call to the specified function. */
6406 fndecl = get_callee_fndecl (exp);
6407 addr = CALL_EXPR_FN (exp);
6408 STRIP_NOPS (addr);
6410 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6411 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6413 /* If we will emit code after the call, the call cannot be a tail call.
6414 If it is emitted as a tail call, a barrier is emitted after it, and
6415 then all trailing code is removed. */
6416 if (!ignore)
6417 CALL_EXPR_TAILCALL (exp) = 0;
6419 /* Expand the call here so we can emit trailing code. */
6420 ret = expand_call (exp, target, ignore);
6422 /* Replace the original function just in case it matters. */
6423 TREE_OPERAND (addr, 0) = fndecl;
6425 /* Then issue the arithmetic correction to return the right result. */
6426 if (!ignore)
6428 if (code == NOT)
6430 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6431 OPTAB_LIB_WIDEN);
6432 ret = expand_simple_unop (mode, NOT, ret, target, true);
6434 else
6435 ret = expand_simple_binop (mode, code, ret, val, target, true,
6436 OPTAB_LIB_WIDEN);
6438 return ret;
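/* Example of the correction above (illustrative): when
   __atomic_add_fetch_4 falls back to the external __atomic_fetch_add_4
   call, the library returns the pre-operation value, so VAL is added
   back in afterwards; for NAND the result is recomputed as
   ~(ret & val), matching the NOT-means-NAND convention.  */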
6441 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6443 void
6444 expand_ifn_atomic_bit_test_and (gcall *call)
6446 tree ptr = gimple_call_arg (call, 0);
6447 tree bit = gimple_call_arg (call, 1);
6448 tree flag = gimple_call_arg (call, 2);
6449 tree lhs = gimple_call_lhs (call);
6450 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6451 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6452 enum rtx_code code;
6453 optab optab;
6454 struct expand_operand ops[5];
6456 gcc_assert (flag_inline_atomics);
6458 if (gimple_call_num_args (call) == 4)
6459 model = get_memmodel (gimple_call_arg (call, 3));
6461 rtx mem = get_builtin_sync_mem (ptr, mode);
6462 rtx val = expand_expr_force_mode (bit, mode);
6464 switch (gimple_call_internal_fn (call))
6466 case IFN_ATOMIC_BIT_TEST_AND_SET:
6467 code = IOR;
6468 optab = atomic_bit_test_and_set_optab;
6469 break;
6470 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6471 code = XOR;
6472 optab = atomic_bit_test_and_complement_optab;
6473 break;
6474 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6475 code = AND;
6476 optab = atomic_bit_test_and_reset_optab;
6477 break;
6478 default:
6479 gcc_unreachable ();
6482 if (lhs == NULL_TREE)
6484 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6485 val, NULL_RTX, true, OPTAB_DIRECT);
6486 if (code == AND)
6487 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6488 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6489 return;
6492 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6493 enum insn_code icode = direct_optab_handler (optab, mode);
6494 gcc_assert (icode != CODE_FOR_nothing);
6495 create_output_operand (&ops[0], target, mode);
6496 create_fixed_operand (&ops[1], mem);
6497 create_convert_operand_to (&ops[2], val, mode, true);
6498 create_integer_operand (&ops[3], model);
6499 create_integer_operand (&ops[4], integer_onep (flag));
6500 if (maybe_expand_insn (icode, 5, ops))
6501 return;
6503 rtx bitval = val;
6504 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6505 val, NULL_RTX, true, OPTAB_DIRECT);
6506 rtx maskval = val;
6507 if (code == AND)
6508 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6509 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6510 code, model, false);
6511 if (integer_onep (flag))
6513 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6514 NULL_RTX, true, OPTAB_DIRECT);
6515 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6516 true, OPTAB_DIRECT);
6518 else
6519 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6520 OPTAB_DIRECT);
6521 if (result != target)
6522 emit_move_insn (target, result);
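/* Roughly, GIMPLE of the shape "_1 = __atomic_fetch_or (p, mask, model);
   _2 = _1 & mask" with a single-bit MASK is matched into
   IFN_ATOMIC_BIT_TEST_AND_SET, which is expanded here either via the
   target's atomic_bit_test_and_set pattern or via the plain atomic
   fetch-or plus the masking fallback above.  */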
6525 /* Expand an atomic clear operation.
6526 void _atomic_clear (BOOL *obj, enum memmodel)
6527 EXP is the call expression. */
6529 static rtx
6530 expand_builtin_atomic_clear (tree exp)
6532 machine_mode mode;
6533 rtx mem, ret;
6534 enum memmodel model;
6536 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6537 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6538 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6540 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6542 source_location loc
6543 = expansion_point_location_if_in_system_header (input_location);
6544 warning_at (loc, OPT_Winvalid_memory_model,
6545 "invalid memory model for %<__atomic_store%>");
6546 model = MEMMODEL_SEQ_CST;
6549 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6550 Failing that, a store is issued by __atomic_store. The only way this can
6551 fail is if the bool type is larger than a word size. Unlikely, but
6552 handle it anyway for completeness. Assume a single threaded model since
6553 there is no atomic support in this case, and no barriers are required. */
6554 ret = expand_atomic_store (mem, const0_rtx, model, true);
6555 if (!ret)
6556 emit_move_insn (mem, const0_rtx);
6557 return const0_rtx;
6560 /* Expand an atomic test_and_set operation.
6561 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6562 EXP is the call expression. */
6564 static rtx
6565 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6567 rtx mem;
6568 enum memmodel model;
6569 machine_mode mode;
6571 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6572 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6573 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6575 return expand_atomic_test_and_set (target, mem, model);
6579 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6580 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6582 static tree
6583 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6585 int size;
6586 machine_mode mode;
6587 unsigned int mode_align, type_align;
6589 if (TREE_CODE (arg0) != INTEGER_CST)
6590 return NULL_TREE;
6592 /* We need a corresponding integer mode for the access to be lock-free. */
6593 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6594 if (!int_mode_for_size (size, 0).exists (&mode))
6595 return boolean_false_node;
6597 mode_align = GET_MODE_ALIGNMENT (mode);
6599 if (TREE_CODE (arg1) == INTEGER_CST)
6601 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6603 /* Either this argument is null, or it's a fake pointer encoding
6604 the alignment of the object. */
6605 val = least_bit_hwi (val);
6606 val *= BITS_PER_UNIT;
6608 if (val == 0 || mode_align < val)
6609 type_align = mode_align;
6610 else
6611 type_align = val;
6613 else
6615 tree ttype = TREE_TYPE (arg1);
6617 /* This function is usually invoked and folded immediately by the front
6618 end before anything else has a chance to look at it. The pointer
6619 parameter at this point is usually cast to a void *, so check for that
6620 and look past the cast. */
6621 if (CONVERT_EXPR_P (arg1)
6622 && POINTER_TYPE_P (ttype)
6623 && VOID_TYPE_P (TREE_TYPE (ttype))
6624 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6625 arg1 = TREE_OPERAND (arg1, 0);
6627 ttype = TREE_TYPE (arg1);
6628 gcc_assert (POINTER_TYPE_P (ttype));
6630 /* Get the underlying type of the object. */
6631 ttype = TREE_TYPE (ttype);
6632 type_align = TYPE_ALIGN (ttype);
6635 /* If the object has smaller alignment, the lock free routines cannot
6636 be used. */
6637 if (type_align < mode_align)
6638 return boolean_false_node;
6640 /* Check if a compare_and_swap pattern exists for the mode which represents
6641 the required size. The pattern is not allowed to fail, so the existence
6642 of the pattern indicates support is present. Also require that an
6643 atomic load exists for the required size. */
6644 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6645 return boolean_true_node;
6646 else
6647 return boolean_false_node;
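/* E.g. __atomic_always_lock_free (4, 0) folds to true on a target
   providing a 32-bit compare-and-swap pattern and atomic load: the
   null object pointer means typical (mode) alignment is assumed.  */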
6650 /* Return true if the parameters to call EXP represent an object which will
6651 always generate lock free instructions. The first argument represents the
6652 size of the object, and the second parameter is a pointer to the object
6653 itself. If NULL is passed for the object, then the result is based on
6654 typical alignment for an object of the specified size. Otherwise return
6655 false. */
6657 static rtx
6658 expand_builtin_atomic_always_lock_free (tree exp)
6660 tree size;
6661 tree arg0 = CALL_EXPR_ARG (exp, 0);
6662 tree arg1 = CALL_EXPR_ARG (exp, 1);
6664 if (TREE_CODE (arg0) != INTEGER_CST)
6666 error ("non-constant argument 1 to __atomic_always_lock_free");
6667 return const0_rtx;
6670 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6671 if (size == boolean_true_node)
6672 return const1_rtx;
6673 return const0_rtx;
6676 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6677 is lock free on this architecture. */
6679 static tree
6680 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6682 if (!flag_inline_atomics)
6683 return NULL_TREE;
6685 /* If it isn't always lock free, don't generate a result. */
6686 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6687 return boolean_true_node;
6689 return NULL_TREE;
6692 /* Return true if the parameters to call EXP represent an object which will
6693 always generate lock free instructions. The first argument represents the
6694 size of the object, and the second parameter is a pointer to the object
6695 itself. If NULL is passed for the object, then the result is based on
6696 typical alignment for an object of the specified size. Otherwise return
6697 NULL. */
6699 static rtx
6700 expand_builtin_atomic_is_lock_free (tree exp)
6702 tree size;
6703 tree arg0 = CALL_EXPR_ARG (exp, 0);
6704 tree arg1 = CALL_EXPR_ARG (exp, 1);
6706 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6708 error ("non-integer argument 1 to __atomic_is_lock_free");
6709 return NULL_RTX;
6712 if (!flag_inline_atomics)
6713 return NULL_RTX;
6715 /* If the value is known at compile time, return the RTX for it. */
6716 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6717 if (size == boolean_true_node)
6718 return const1_rtx;
6720 return NULL_RTX;
6723 /* Expand the __atomic_thread_fence intrinsic:
6724 void __atomic_thread_fence (enum memmodel)
6725 EXP is the CALL_EXPR. */
6727 static void
6728 expand_builtin_atomic_thread_fence (tree exp)
6730 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6731 expand_mem_thread_fence (model);
6734 /* Expand the __atomic_signal_fence intrinsic:
6735 void __atomic_signal_fence (enum memmodel)
6736 EXP is the CALL_EXPR. */
6738 static void
6739 expand_builtin_atomic_signal_fence (tree exp)
6741 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6742 expand_mem_signal_fence (model);
6745 /* Expand the __sync_synchronize intrinsic. */
6747 static void
6748 expand_builtin_sync_synchronize (void)
6750 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6753 static rtx
6754 expand_builtin_thread_pointer (tree exp, rtx target)
6756 enum insn_code icode;
6757 if (!validate_arglist (exp, VOID_TYPE))
6758 return const0_rtx;
6759 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6760 if (icode != CODE_FOR_nothing)
6762 struct expand_operand op;
6763 /* If the target is not suitable then create a new target. */
6764 if (target == NULL_RTX
6765 || !REG_P (target)
6766 || GET_MODE (target) != Pmode)
6767 target = gen_reg_rtx (Pmode);
6768 create_output_operand (&op, target, Pmode);
6769 expand_insn (icode, 1, &op);
6770 return target;
6772 error ("__builtin_thread_pointer is not supported on this target");
6773 return const0_rtx;
6776 static void
6777 expand_builtin_set_thread_pointer (tree exp)
6779 enum insn_code icode;
6780 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6781 return;
6782 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6783 if (icode != CODE_FOR_nothing)
6785 struct expand_operand op;
6786 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6787 Pmode, EXPAND_NORMAL);
6788 create_input_operand (&op, val, Pmode);
6789 expand_insn (icode, 1, &op);
6790 return;
6792 error ("__builtin_set_thread_pointer is not supported on this target");
6796 /* Emit code to restore the current value of stack. */
6798 static void
6799 expand_stack_restore (tree var)
6801 rtx_insn *prev;
6802 rtx sa = expand_normal (var);
6804 sa = convert_memory_address (Pmode, sa);
6806 prev = get_last_insn ();
6807 emit_stack_restore (SAVE_BLOCK, sa);
6809 record_new_stack_level ();
6811 fixup_args_size_notes (prev, get_last_insn (), 0);
6814 /* Emit code to save the current value of stack. */
6816 static rtx
6817 expand_stack_save (void)
6819 rtx ret = NULL_RTX;
6821 emit_stack_save (SAVE_BLOCK, &ret);
6822 return ret;
6825 /* Emit code to get the openacc gang, worker or vector id or size. */
6827 static rtx
6828 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6830 const char *name;
6831 rtx fallback_retval;
6832 rtx_insn *(*gen_fn) (rtx, rtx);
6833 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6835 case BUILT_IN_GOACC_PARLEVEL_ID:
6836 name = "__builtin_goacc_parlevel_id";
6837 fallback_retval = const0_rtx;
6838 gen_fn = targetm.gen_oacc_dim_pos;
6839 break;
6840 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6841 name = "__builtin_goacc_parlevel_size";
6842 fallback_retval = const1_rtx;
6843 gen_fn = targetm.gen_oacc_dim_size;
6844 break;
6845 default:
6846 gcc_unreachable ();
6849 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6851 error ("%qs only supported in OpenACC code", name);
6852 return const0_rtx;
6855 tree arg = CALL_EXPR_ARG (exp, 0);
6856 if (TREE_CODE (arg) != INTEGER_CST)
6858 error ("non-constant argument 0 to %qs", name);
6859 return const0_rtx;
6862 int dim = TREE_INT_CST_LOW (arg);
6863 switch (dim)
6865 case GOMP_DIM_GANG:
6866 case GOMP_DIM_WORKER:
6867 case GOMP_DIM_VECTOR:
6868 break;
6869 default:
6870 error ("illegal argument 0 to %qs", name);
6871 return const0_rtx;
6874 if (ignore)
6875 return target;
6877 if (target == NULL_RTX)
6878 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6880 if (!targetm.have_oacc_dim_size ())
6882 emit_move_insn (target, fallback_retval);
6883 return target;
6886 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6887 emit_insn (gen_fn (reg, GEN_INT (dim)));
6888 if (reg != target)
6889 emit_move_insn (target, reg);
6891 return target;
6894 /* Expand a string compare operation using a sequence of char comparisons
6895 to get rid of the calling overhead, with result going to TARGET if
6896 that's convenient.
6898 VAR_STR is the variable string source;
6899 CONST_STR is the constant string source;
6900 LENGTH is the number of chars to compare;
6901 CONST_STR_N indicates which source string is the constant string;
6902 MODE is the mode of the result.
6904 The call expands to (assuming const_str_n is 2, i.e., arg2 is the constant string):
6906 target = (int) (unsigned char) var_str[0]
6907 - (int) (unsigned char) const_str[0];
6908 if (target != 0)
6909 goto ne_label;
6911 target = (int) (unsigned char) var_str[length - 2]
6912 - (int) (unsigned char) const_str[length - 2];
6913 if (target != 0)
6914 goto ne_label;
6915 target = (int) (unsigned char) var_str[length - 1]
6916 - (int) (unsigned char) const_str[length - 1];
6917 ne_label:
6920 static rtx
6921 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6922 unsigned HOST_WIDE_INT length,
6923 int const_str_n, machine_mode mode)
6925 HOST_WIDE_INT offset = 0;
6926 rtx var_rtx_array
6927 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6928 rtx var_rtx = NULL_RTX;
6929 rtx const_rtx = NULL_RTX;
6930 rtx result = target ? target : gen_reg_rtx (mode);
6931 rtx_code_label *ne_label = gen_label_rtx ();
6932 tree unit_type_node = unsigned_char_type_node;
6933 scalar_int_mode unit_mode
6934 = as_a <scalar_int_mode> (TYPE_MODE (unit_type_node));
6936 start_sequence ();
6938 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6940 var_rtx
6941 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6942 const_rtx = c_readstr (const_str + offset, unit_mode);
6943 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6944 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6946 op0 = convert_modes (mode, unit_mode, op0, 1);
6947 op1 = convert_modes (mode, unit_mode, op1, 1);
6948 result = expand_simple_binop (mode, MINUS, op0, op1,
6949 result, 1, OPTAB_WIDEN);
6950 if (i < length - 1)
6951 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6952 mode, true, ne_label);
6953 offset += GET_MODE_SIZE (unit_mode);
6956 emit_label (ne_label);
6957 rtx_insn *insns = get_insns ();
6958 end_sequence ();
6959 emit_insn (insns);
6961 return result;
6964 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
6965 to TARGET if that's convenient.
6966 If the call is not inlined, return NULL_RTX. */
6967 static rtx
6968 inline_expand_builtin_string_cmp (tree exp, rtx target)
6970 tree fndecl = get_callee_fndecl (exp);
6971 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6972 unsigned HOST_WIDE_INT length = 0;
6973 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6975 /* Do NOT apply this inlining expansion when optimizing for size or
6976 optimization level below 2. */
6977 if (optimize < 2 || optimize_insn_for_size_p ())
6978 return NULL_RTX;
6980 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6981 || fcode == BUILT_IN_STRNCMP
6982 || fcode == BUILT_IN_MEMCMP);
6984 /* On a target where the type of the call (int) has the same or narrower precision
6985 than unsigned char, give up the inlining expansion. */
6986 if (TYPE_PRECISION (unsigned_char_type_node)
6987 >= TYPE_PRECISION (TREE_TYPE (exp)))
6988 return NULL_RTX;
6990 tree arg1 = CALL_EXPR_ARG (exp, 0);
6991 tree arg2 = CALL_EXPR_ARG (exp, 1);
6992 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6994 unsigned HOST_WIDE_INT len1 = 0;
6995 unsigned HOST_WIDE_INT len2 = 0;
6996 unsigned HOST_WIDE_INT len3 = 0;
6998 const char *src_str1 = c_getstr (arg1, &len1);
6999 const char *src_str2 = c_getstr (arg2, &len2);
7001 /* If neither string is a constant string, the call does not qualify. */
7002 if (!src_str1 && !src_str2)
7003 return NULL_RTX;
7005 /* For strncmp, if the length is not a constant, the call does not qualify. */
7006 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7007 return NULL_RTX;
7009 int const_str_n = 0;
7010 if (!len1)
7011 const_str_n = 2;
7012 else if (!len2)
7013 const_str_n = 1;
7014 else if (len2 > len1)
7015 const_str_n = 1;
7016 else
7017 const_str_n = 2;
7019 gcc_checking_assert (const_str_n > 0);
7020 length = (const_str_n == 1) ? len1 : len2;
7022 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7023 length = len3;
7025 /* If the length of the comparison is larger than the threshold,
7026 do nothing. */
7027 if (length > (unsigned HOST_WIDE_INT)
7028 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7029 return NULL_RTX;
7031 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7033 /* Now, start inline expansion of the call. */
7034 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7035 (const_str_n == 1) ? src_str1 : src_str2, length,
7036 const_str_n, mode);
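/* E.g. at -O2, a call such as strcmp (s, "ab") whose constant string
   length is under the BUILTIN_STRING_CMP_INLINE_LENGTH threshold is
   expanded to the inline byte-difference sequence sketched before
   inline_string_cmp, rather than to a library call.  */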
7039 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7040 represents the size of the first argument to that call, or VOIDmode
7041 if the argument is a pointer. IGNORE will be true if the result
7042 isn't used. */
7043 static rtx
7044 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7045 bool ignore)
7047 rtx val, failsafe;
7048 unsigned nargs = call_expr_nargs (exp);
7050 tree arg0 = CALL_EXPR_ARG (exp, 0);
7052 if (mode == VOIDmode)
7054 mode = TYPE_MODE (TREE_TYPE (arg0));
7055 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7058 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7060 /* An optional second argument can be used as a failsafe value on
7061 some machines. If it isn't present, then the failsafe value is
7062 assumed to be 0. */
7063 if (nargs > 1)
7065 tree arg1 = CALL_EXPR_ARG (exp, 1);
7066 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7068 else
7069 failsafe = const0_rtx;
7071 /* If the result isn't used, the behavior is undefined. It would be
7072 nice to emit a warning here, but path splitting means this might
7073 happen with legitimate code. So simply drop the builtin
7074 expansion in that case; we've handled any side-effects above. */
7075 if (ignore)
7076 return const0_rtx;
7078 /* If we don't have a suitable target, create one to hold the result. */
7079 if (target == NULL || GET_MODE (target) != mode)
7080 target = gen_reg_rtx (mode);
7082 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7083 val = convert_modes (mode, VOIDmode, val, false);
7085 return targetm.speculation_safe_value (mode, target, val, failsafe);
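/* A sketch of the intended use (illustrative only; ARRAY, BOUND and I
   are assumed names):

     if (i < bound)
       val = __builtin_speculation_safe_value (array[i]);

   so that VAL cannot carry the loaded value along a mis-speculated
   path.  On targets without a speculation barrier the target hook may
   simply copy VAL to TARGET.  */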
7088 /* Expand an expression EXP that calls a built-in function,
7089 with result going to TARGET if that's convenient
7090 (and in mode MODE if that's convenient).
7091 SUBTARGET may be used as the target for computing one of EXP's operands.
7092 IGNORE is nonzero if the value is to be ignored. */
7095 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7096 int ignore)
7098 tree fndecl = get_callee_fndecl (exp);
7099 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7100 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7101 int flags;
7103 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7104 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7106 /* When ASan is enabled, we don't want to expand some memory/string
7107 builtins and rely on libsanitizer's hooks. This allows us to avoid
7108 redundant checks and be sure that a possible overflow will be detected
7109 by ASan. */
7111 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7112 return expand_call (exp, target, ignore);
7114 /* When not optimizing, generate calls to library functions for a certain
7115 set of builtins. */
7116 if (!optimize
7117 && !called_as_built_in (fndecl)
7118 && fcode != BUILT_IN_FORK
7119 && fcode != BUILT_IN_EXECL
7120 && fcode != BUILT_IN_EXECV
7121 && fcode != BUILT_IN_EXECLP
7122 && fcode != BUILT_IN_EXECLE
7123 && fcode != BUILT_IN_EXECVP
7124 && fcode != BUILT_IN_EXECVE
7125 && !ALLOCA_FUNCTION_CODE_P (fcode)
7126 && fcode != BUILT_IN_FREE)
7127 return expand_call (exp, target, ignore);
7129 /* The built-in function expanders test for target == const0_rtx
7130 to determine whether the function's result will be ignored. */
7131 if (ignore)
7132 target = const0_rtx;
7134 /* If the result of a pure or const built-in function is ignored, and
7135 none of its arguments are volatile, we can avoid expanding the
7136 built-in call and just evaluate the arguments for side-effects. */
7137 if (target == const0_rtx
7138 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7139 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7141 bool volatilep = false;
7142 tree arg;
7143 call_expr_arg_iterator iter;
7145 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7146 if (TREE_THIS_VOLATILE (arg))
7148 volatilep = true;
7149 break;
7152 if (! volatilep)
7154 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7155 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7156 return const0_rtx;
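/* E.g. a statement such as "(void) __builtin_labs (x);" arrives here
   with TARGET == const0_rtx; labs is ECF_CONST, so unless X is volatile
   we skip the expansion entirely and only evaluate X for its side
   effects.  */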
7160 switch (fcode)
7162 CASE_FLT_FN (BUILT_IN_FABS):
7163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7164 case BUILT_IN_FABSD32:
7165 case BUILT_IN_FABSD64:
7166 case BUILT_IN_FABSD128:
7167 target = expand_builtin_fabs (exp, target, subtarget);
7168 if (target)
7169 return target;
7170 break;
7172 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7173 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7174 target = expand_builtin_copysign (exp, target, subtarget);
7175 if (target)
7176 return target;
7177 break;
7179 /* Just do a normal library call if we were unable to fold
7180 the values. */
7181 CASE_FLT_FN (BUILT_IN_CABS):
7182 break;
7184 CASE_FLT_FN (BUILT_IN_FMA):
7185 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7186 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7187 if (target)
7188 return target;
7189 break;
7191 CASE_FLT_FN (BUILT_IN_ILOGB):
7192 if (! flag_unsafe_math_optimizations)
7193 break;
7194 gcc_fallthrough ();
7195 CASE_FLT_FN (BUILT_IN_ISINF):
7196 CASE_FLT_FN (BUILT_IN_FINITE):
7197 case BUILT_IN_ISFINITE:
7198 case BUILT_IN_ISNORMAL:
7199 target = expand_builtin_interclass_mathfn (exp, target);
7200 if (target)
7201 return target;
7202 break;
7204 CASE_FLT_FN (BUILT_IN_ICEIL):
7205 CASE_FLT_FN (BUILT_IN_LCEIL):
7206 CASE_FLT_FN (BUILT_IN_LLCEIL):
7207 CASE_FLT_FN (BUILT_IN_LFLOOR):
7208 CASE_FLT_FN (BUILT_IN_IFLOOR):
7209 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7210 target = expand_builtin_int_roundingfn (exp, target);
7211 if (target)
7212 return target;
7213 break;
7215 CASE_FLT_FN (BUILT_IN_IRINT):
7216 CASE_FLT_FN (BUILT_IN_LRINT):
7217 CASE_FLT_FN (BUILT_IN_LLRINT):
7218 CASE_FLT_FN (BUILT_IN_IROUND):
7219 CASE_FLT_FN (BUILT_IN_LROUND):
7220 CASE_FLT_FN (BUILT_IN_LLROUND):
7221 target = expand_builtin_int_roundingfn_2 (exp, target);
7222 if (target)
7223 return target;
7224 break;
7226 CASE_FLT_FN (BUILT_IN_POWI):
7227 target = expand_builtin_powi (exp, target);
7228 if (target)
7229 return target;
7230 break;
7232 CASE_FLT_FN (BUILT_IN_CEXPI):
7233 target = expand_builtin_cexpi (exp, target);
7234 gcc_assert (target);
7235 return target;
7237 CASE_FLT_FN (BUILT_IN_SIN):
7238 CASE_FLT_FN (BUILT_IN_COS):
7239 if (! flag_unsafe_math_optimizations)
7240 break;
7241 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7242 if (target)
7243 return target;
7244 break;
7246 CASE_FLT_FN (BUILT_IN_SINCOS):
7247 if (! flag_unsafe_math_optimizations)
7248 break;
7249 target = expand_builtin_sincos (exp);
7250 if (target)
7251 return target;
7252 break;
7254 case BUILT_IN_APPLY_ARGS:
7255 return expand_builtin_apply_args ();
7257 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7258 FUNCTION with a copy of the parameters described by
7259 ARGUMENTS, and ARGSIZE. It returns a block of memory
7260 allocated on the stack into which are stored all the registers
7261 that might possibly be used for returning the result of a
7262 function. ARGUMENTS is the value returned by
7263 __builtin_apply_args. ARGSIZE is the number of bytes of
7264 arguments that must be copied. ??? How should this value be
7265 computed? We'll also need a safe worst case value for varargs
7266 functions. */
7267 case BUILT_IN_APPLY:
7268 if (!validate_arglist (exp, POINTER_TYPE,
7269 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7270 && !validate_arglist (exp, REFERENCE_TYPE,
7271 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7272 return const0_rtx;
7273 else
7275 rtx ops[3];
7277 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7278 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7279 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7281 return expand_builtin_apply (ops[0], ops[1], ops[2]);
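/* Illustrative use of this family of builtins (see the GCC manual; FN
   and the size 64 are caller-chosen assumptions):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) fn, args, 64);
     __builtin_return (ret);

   which forwards the current function's arguments to FN and returns
   its result.  */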
7284 /* __builtin_return (RESULT) causes the function to return the
7285 value described by RESULT. RESULT is address of the block of
7286 memory returned by __builtin_apply. */
7287 case BUILT_IN_RETURN:
7288 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7289 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7290 return const0_rtx;
7292 case BUILT_IN_SAVEREGS:
7293 return expand_builtin_saveregs ();
7295 case BUILT_IN_VA_ARG_PACK:
7296 /* All valid uses of __builtin_va_arg_pack () are removed during
7297 inlining. */
7298 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7299 return const0_rtx;
7301 case BUILT_IN_VA_ARG_PACK_LEN:
7302 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7303 inlining. */
7304 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7305 return const0_rtx;
7307 /* Return the address of the first anonymous stack arg. */
7308 case BUILT_IN_NEXT_ARG:
7309 if (fold_builtin_next_arg (exp, false))
7310 return const0_rtx;
7311 return expand_builtin_next_arg ();
7313 case BUILT_IN_CLEAR_CACHE:
7314 target = expand_builtin___clear_cache (exp);
7315 if (target)
7316 return target;
7317 break;
7319 case BUILT_IN_CLASSIFY_TYPE:
7320 return expand_builtin_classify_type (exp);
7322 case BUILT_IN_CONSTANT_P:
7323 return const0_rtx;
7325 case BUILT_IN_FRAME_ADDRESS:
7326 case BUILT_IN_RETURN_ADDRESS:
7327 return expand_builtin_frame_address (fndecl, exp);
7329 /* Returns the address of the area where the structure is returned,
7330 or 0 otherwise. */
7331 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7332 if (call_expr_nargs (exp) != 0
7333 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7334 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7335 return const0_rtx;
7336 else
7337 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7339 CASE_BUILT_IN_ALLOCA:
7340 target = expand_builtin_alloca (exp);
7341 if (target)
7342 return target;
7343 break;
7345 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7346 return expand_asan_emit_allocas_unpoison (exp);
7348 case BUILT_IN_STACK_SAVE:
7349 return expand_stack_save ();
7351 case BUILT_IN_STACK_RESTORE:
7352 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7353 return const0_rtx;
7355 case BUILT_IN_BSWAP16:
7356 case BUILT_IN_BSWAP32:
7357 case BUILT_IN_BSWAP64:
7358 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7359 if (target)
7360 return target;
7361 break;
7363 CASE_INT_FN (BUILT_IN_FFS):
7364 target = expand_builtin_unop (target_mode, exp, target,
7365 subtarget, ffs_optab);
7366 if (target)
7367 return target;
7368 break;
7370 CASE_INT_FN (BUILT_IN_CLZ):
7371 target = expand_builtin_unop (target_mode, exp, target,
7372 subtarget, clz_optab);
7373 if (target)
7374 return target;
7375 break;
7377 CASE_INT_FN (BUILT_IN_CTZ):
7378 target = expand_builtin_unop (target_mode, exp, target,
7379 subtarget, ctz_optab);
7380 if (target)
7381 return target;
7382 break;
7384 CASE_INT_FN (BUILT_IN_CLRSB):
7385 target = expand_builtin_unop (target_mode, exp, target,
7386 subtarget, clrsb_optab);
7387 if (target)
7388 return target;
7389 break;
7391 CASE_INT_FN (BUILT_IN_POPCOUNT):
7392 target = expand_builtin_unop (target_mode, exp, target,
7393 subtarget, popcount_optab);
7394 if (target)
7395 return target;
7396 break;
7398 CASE_INT_FN (BUILT_IN_PARITY):
7399 target = expand_builtin_unop (target_mode, exp, target,
7400 subtarget, parity_optab);
7401 if (target)
7402 return target;
7403 break;
7405 case BUILT_IN_STRLEN:
7406 target = expand_builtin_strlen (exp, target, target_mode);
7407 if (target)
7408 return target;
7409 break;
7411 case BUILT_IN_STRNLEN:
7412 target = expand_builtin_strnlen (exp, target, target_mode);
7413 if (target)
7414 return target;
7415 break;
7417 case BUILT_IN_STRCAT:
7418 target = expand_builtin_strcat (exp, target);
7419 if (target)
7420 return target;
7421 break;
7423 case BUILT_IN_STRCPY:
7424 target = expand_builtin_strcpy (exp, target);
7425 if (target)
7426 return target;
7427 break;
7429 case BUILT_IN_STRNCAT:
7430 target = expand_builtin_strncat (exp, target);
7431 if (target)
7432 return target;
7433 break;
7435 case BUILT_IN_STRNCPY:
7436 target = expand_builtin_strncpy (exp, target);
7437 if (target)
7438 return target;
7439 break;
7441 case BUILT_IN_STPCPY:
7442 target = expand_builtin_stpcpy (exp, target, mode);
7443 if (target)
7444 return target;
7445 break;
7447 case BUILT_IN_STPNCPY:
7448 target = expand_builtin_stpncpy (exp, target);
7449 if (target)
7450 return target;
7451 break;
7453 case BUILT_IN_MEMCHR:
7454 target = expand_builtin_memchr (exp, target);
7455 if (target)
7456 return target;
7457 break;
7459 case BUILT_IN_MEMCPY:
7460 target = expand_builtin_memcpy (exp, target);
7461 if (target)
7462 return target;
7463 break;
7465 case BUILT_IN_MEMMOVE:
7466 target = expand_builtin_memmove (exp, target);
7467 if (target)
7468 return target;
7469 break;
7471 case BUILT_IN_MEMPCPY:
7472 target = expand_builtin_mempcpy (exp, target);
7473 if (target)
7474 return target;
7475 break;
7477 case BUILT_IN_MEMSET:
7478 target = expand_builtin_memset (exp, target, mode);
7479 if (target)
7480 return target;
7481 break;
7483 case BUILT_IN_BZERO:
7484 target = expand_builtin_bzero (exp);
7485 if (target)
7486 return target;
7487 break;
7489 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7490 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7491 when changing it to a strcmp call. */
7492 case BUILT_IN_STRCMP_EQ:
7493 target = expand_builtin_memcmp (exp, target, true);
7494 if (target)
7495 return target;
7497 /* Change this call back to a BUILT_IN_STRCMP. */
7498 TREE_OPERAND (exp, 1)
7499 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7501 /* Delete the last parameter. */
7502 unsigned int i;
7503 vec<tree, va_gc> *arg_vec;
7504 vec_alloc (arg_vec, 2);
7505 for (i = 0; i < 2; i++)
7506 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7507 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7508 /* FALLTHROUGH */
7510 case BUILT_IN_STRCMP:
7511 target = expand_builtin_strcmp (exp, target);
7512 if (target)
7513 return target;
7514 break;
7516 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7517 back to a BUILT_IN_STRNCMP. */
7518 case BUILT_IN_STRNCMP_EQ:
7519 target = expand_builtin_memcmp (exp, target, true);
7520 if (target)
7521 return target;
7523 /* Change it back to a BUILT_IN_STRNCMP. */
7524 TREE_OPERAND (exp, 1)
7525 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7526 /* FALLTHROUGH */
7528 case BUILT_IN_STRNCMP:
7529 target = expand_builtin_strncmp (exp, target, mode);
7530 if (target)
7531 return target;
7532 break;
7534 case BUILT_IN_BCMP:
7535 case BUILT_IN_MEMCMP:
7536 case BUILT_IN_MEMCMP_EQ:
7537 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7538 if (target)
7539 return target;
7540 if (fcode == BUILT_IN_MEMCMP_EQ)
7542 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7543 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7545 break;
7547 case BUILT_IN_SETJMP:
7548 /* This should have been lowered to the builtins below. */
7549 gcc_unreachable ();
7551 case BUILT_IN_SETJMP_SETUP:
7552 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7553 and the receiver label. */
7554 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7556 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7557 VOIDmode, EXPAND_NORMAL);
7558 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7559 rtx_insn *label_r = label_rtx (label);
7561 /* This is copied from the handling of non-local gotos. */
7562 expand_builtin_setjmp_setup (buf_addr, label_r);
7563 nonlocal_goto_handler_labels
7564 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7565 nonlocal_goto_handler_labels);
7566 /* ??? Do not let expand_label treat us as such since we would
7567 not want to be both on the list of non-local labels and on
7568 the list of forced labels. */
7569 FORCED_LABEL (label) = 0;
7570 return const0_rtx;
7572 break;
7574 case BUILT_IN_SETJMP_RECEIVER:
7575 /* __builtin_setjmp_receiver is passed the receiver label. */
7576 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7578 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7579 rtx_insn *label_r = label_rtx (label);
7581 expand_builtin_setjmp_receiver (label_r);
7582 return const0_rtx;
7584 break;
7586 /* __builtin_longjmp is passed a pointer to an array of five words.
7587 It's similar to the C library longjmp function but works with
7588 __builtin_setjmp above. */
7589 case BUILT_IN_LONGJMP:
7590 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7592 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7593 VOIDmode, EXPAND_NORMAL);
7594 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7596 if (value != const1_rtx)
7598 error ("%<__builtin_longjmp%> second argument must be 1");
7599 return const0_rtx;
7602 expand_builtin_longjmp (buf_addr, value);
7603 return const0_rtx;
7605 break;
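/* These builtins are used in pairs, e.g. (illustrative):

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument of __builtin_longjmp must be the literal 1, as
   checked above; the setjmp half has already been lowered to the
   SETJMP_SETUP/RECEIVER builtins handled in the preceding cases.  */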
7607 case BUILT_IN_NONLOCAL_GOTO:
7608 target = expand_builtin_nonlocal_goto (exp);
7609 if (target)
7610 return target;
7611 break;
7613 /* This updates the setjmp buffer that is its argument with the value
7614 of the current stack pointer. */
7615 case BUILT_IN_UPDATE_SETJMP_BUF:
7616 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7618 rtx buf_addr
7619 = expand_normal (CALL_EXPR_ARG (exp, 0));
7621 expand_builtin_update_setjmp_buf (buf_addr);
7622 return const0_rtx;
7624 break;
7626 case BUILT_IN_TRAP:
7627 expand_builtin_trap ();
7628 return const0_rtx;
7630 case BUILT_IN_UNREACHABLE:
7631 expand_builtin_unreachable ();
7632 return const0_rtx;
7634 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7635 case BUILT_IN_SIGNBITD32:
7636 case BUILT_IN_SIGNBITD64:
7637 case BUILT_IN_SIGNBITD128:
7638 target = expand_builtin_signbit (exp, target);
7639 if (target)
7640 return target;
7641 break;
7643 /* Various hooks for the DWARF 2 __throw routine. */
7644 case BUILT_IN_UNWIND_INIT:
7645 expand_builtin_unwind_init ();
7646 return const0_rtx;
7647 case BUILT_IN_DWARF_CFA:
7648 return virtual_cfa_rtx;
7649 #ifdef DWARF2_UNWIND_INFO
7650 case BUILT_IN_DWARF_SP_COLUMN:
7651 return expand_builtin_dwarf_sp_column ();
7652 case BUILT_IN_INIT_DWARF_REG_SIZES:
7653 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7654 return const0_rtx;
7655 #endif
7656 case BUILT_IN_FROB_RETURN_ADDR:
7657 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7658 case BUILT_IN_EXTRACT_RETURN_ADDR:
7659 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7660 case BUILT_IN_EH_RETURN:
7661 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7662 CALL_EXPR_ARG (exp, 1));
7663 return const0_rtx;
7664 case BUILT_IN_EH_RETURN_DATA_REGNO:
7665 return expand_builtin_eh_return_data_regno (exp);
7666 case BUILT_IN_EXTEND_POINTER:
7667 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7668 case BUILT_IN_EH_POINTER:
7669 return expand_builtin_eh_pointer (exp);
7670 case BUILT_IN_EH_FILTER:
7671 return expand_builtin_eh_filter (exp);
7672 case BUILT_IN_EH_COPY_VALUES:
7673 return expand_builtin_eh_copy_values (exp);
7675 case BUILT_IN_VA_START:
7676 return expand_builtin_va_start (exp);
7677 case BUILT_IN_VA_END:
7678 return expand_builtin_va_end (exp);
7679 case BUILT_IN_VA_COPY:
7680 return expand_builtin_va_copy (exp);
7681 case BUILT_IN_EXPECT:
7682 return expand_builtin_expect (exp, target);
7683 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7684 return expand_builtin_expect_with_probability (exp, target);
7685 case BUILT_IN_ASSUME_ALIGNED:
7686 return expand_builtin_assume_aligned (exp, target);
7687 case BUILT_IN_PREFETCH:
7688 expand_builtin_prefetch (exp);
7689 return const0_rtx;
7691 case BUILT_IN_INIT_TRAMPOLINE:
7692 return expand_builtin_init_trampoline (exp, true);
7693 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7694 return expand_builtin_init_trampoline (exp, false);
7695 case BUILT_IN_ADJUST_TRAMPOLINE:
7696 return expand_builtin_adjust_trampoline (exp);
7698 case BUILT_IN_INIT_DESCRIPTOR:
7699 return expand_builtin_init_descriptor (exp);
7700 case BUILT_IN_ADJUST_DESCRIPTOR:
7701 return expand_builtin_adjust_descriptor (exp);
7703 case BUILT_IN_FORK:
7704 case BUILT_IN_EXECL:
7705 case BUILT_IN_EXECV:
7706 case BUILT_IN_EXECLP:
7707 case BUILT_IN_EXECLE:
7708 case BUILT_IN_EXECVP:
7709 case BUILT_IN_EXECVE:
7710 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7711 if (target)
7712 return target;
7713 break;
7715 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7716 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7717 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7718 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7719 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7720 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7721 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7722 if (target)
7723 return target;
7724 break;
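/* E.g. "__sync_fetch_and_add (&counter, 1)" on a 4-byte counter maps
   to BUILT_IN_SYNC_FETCH_AND_ADD_4; the expansion above emits an
   atomic PLUS and, since the AFTER argument is false, yields the value
   the counter held before the addition.  */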
7726 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7727 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7728 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7729 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7730 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7731 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7732 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7733 if (target)
7734 return target;
7735 break;
7737 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7738 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7739 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7740 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7741 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7742 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7743 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7744 if (target)
7745 return target;
7746 break;
7748 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7749 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7750 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7751 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7752 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7753 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7754 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7755 if (target)
7756 return target;
7757 break;
7759 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7760 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7761 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7762 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7763 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7764 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7765 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7766 if (target)
7767 return target;
7768 break;
7770 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7771 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7772 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7773 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7774 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7775 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7776 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7777 if (target)
7778 return target;
7779 break;
7781 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7782 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7783 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7784 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7785 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7786 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7787 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7788 if (target)
7789 return target;
7790 break;
7792 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7793 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7794 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7795 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7796 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7797 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7798 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7799 if (target)
7800 return target;
7801 break;
7803 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7804 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7805 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7806 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7807 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7808 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7809 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7810 if (target)
7811 return target;
7812 break;
7814 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7815 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7816 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7817 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7818 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7819 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7820 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7821 if (target)
7822 return target;
7823 break;
7825 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7826 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7827 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7828 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7829 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7830 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7831 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7832 if (target)
7833 return target;
7834 break;
7836 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7837 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7838 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7839 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7840 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7841 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7842 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7843 if (target)
7844 return target;
7845 break;
7847 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7848 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7849 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7850 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7851 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7852 if (mode == VOIDmode)
7853 mode = TYPE_MODE (boolean_type_node);
7854 if (!target || !register_operand (target, mode))
7855 target = gen_reg_rtx (mode);
7857 mode = get_builtin_sync_mode
7858 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7859 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7860 if (target)
7861 return target;
7862 break;
7864 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7865 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7866 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7867 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7868 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7869 mode = get_builtin_sync_mode
7870 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7871 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7872 if (target)
7873 return target;
7874 break;
7876 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7877 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7878 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7879 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7880 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7881 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7882 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7883 if (target)
7884 return target;
7885 break;
7887 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7888 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7889 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7890 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7891 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7892 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7893 expand_builtin_sync_lock_release (mode, exp);
7894 return const0_rtx;
7896 case BUILT_IN_SYNC_SYNCHRONIZE:
7897 expand_builtin_sync_synchronize ();
7898 return const0_rtx;
7900 case BUILT_IN_ATOMIC_EXCHANGE_1:
7901 case BUILT_IN_ATOMIC_EXCHANGE_2:
7902 case BUILT_IN_ATOMIC_EXCHANGE_4:
7903 case BUILT_IN_ATOMIC_EXCHANGE_8:
7904 case BUILT_IN_ATOMIC_EXCHANGE_16:
7905 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7906 target = expand_builtin_atomic_exchange (mode, exp, target);
7907 if (target)
7908 return target;
7909 break;
7911 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7912 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7913 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7914 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7915 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7917 unsigned int nargs, z;
7918 vec<tree, va_gc> *vec;
7920 mode =
7921 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7922 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7923 if (target)
7924 return target;
7926 /* If this is turned into an external library call, the weak parameter
7927 must be dropped to match the expected parameter list. */
7928 nargs = call_expr_nargs (exp);
7929 vec_alloc (vec, nargs - 1);
7930 for (z = 0; z < 3; z++)
7931 vec->quick_push (CALL_EXPR_ARG (exp, z));
7932 /* Skip the boolean weak parameter. */
7933 for (z = 4; z < 6; z++)
7934 vec->quick_push (CALL_EXPR_ARG (exp, z));
7935 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7936 break;
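/* For example (illustrative), a 4-byte
     __atomic_compare_exchange_n (&v, &expected, desired, 0,
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)
   that cannot be expanded inline becomes a call to the out-of-line
   routine, whose parameter list (roughly
     bool __atomic_compare_exchange_4 (int *, int *, int, int, int))
   has no weak argument, so the boolean weak parameter at position 3 is
   dropped when the CALL_EXPR is rebuilt above.  */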
7939 case BUILT_IN_ATOMIC_LOAD_1:
7940 case BUILT_IN_ATOMIC_LOAD_2:
7941 case BUILT_IN_ATOMIC_LOAD_4:
7942 case BUILT_IN_ATOMIC_LOAD_8:
7943 case BUILT_IN_ATOMIC_LOAD_16:
7944 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7945 target = expand_builtin_atomic_load (mode, exp, target);
7946 if (target)
7947 return target;
7948 break;
7950 case BUILT_IN_ATOMIC_STORE_1:
7951 case BUILT_IN_ATOMIC_STORE_2:
7952 case BUILT_IN_ATOMIC_STORE_4:
7953 case BUILT_IN_ATOMIC_STORE_8:
7954 case BUILT_IN_ATOMIC_STORE_16:
7955 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7956 target = expand_builtin_atomic_store (mode, exp);
7957 if (target)
7958 return const0_rtx;
7959 break;
7961 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7962 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7963 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7964 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7965 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7967 enum built_in_function lib;
7968 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7969 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7970 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7971 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7972 ignore, lib);
7973 if (target)
7974 return target;
7975 break;
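/* E.g. an "__atomic_add_fetch (&v, n, order)" that cannot be expanded
   inline is routed to the external __atomic_fetch_add_<size> routine
   instead, and expand_builtin_atomic_fetch_op re-applies the PLUS to
   the returned (pre-operation) value to produce the add-and-fetch
   result.  */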
7977 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7978 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7979 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7980 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7981 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7983 enum built_in_function lib;
7984 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7985 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7986 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7987 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7988 ignore, lib);
7989 if (target)
7990 return target;
7991 break;
7993 case BUILT_IN_ATOMIC_AND_FETCH_1:
7994 case BUILT_IN_ATOMIC_AND_FETCH_2:
7995 case BUILT_IN_ATOMIC_AND_FETCH_4:
7996 case BUILT_IN_ATOMIC_AND_FETCH_8:
7997 case BUILT_IN_ATOMIC_AND_FETCH_16:
7999 enum built_in_function lib;
8000 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8001 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8002 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8003 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8004 ignore, lib);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8010 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8011 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8012 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8013 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8015 enum built_in_function lib;
8016 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8017 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8018 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8019 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8020 ignore, lib);
8021 if (target)
8022 return target;
8023 break;
8025 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8026 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8027 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8028 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8029 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8031 enum built_in_function lib;
8032 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8033 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8034 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8035 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8036 ignore, lib);
8037 if (target)
8038 return target;
8039 break;
8041 case BUILT_IN_ATOMIC_OR_FETCH_1:
8042 case BUILT_IN_ATOMIC_OR_FETCH_2:
8043 case BUILT_IN_ATOMIC_OR_FETCH_4:
8044 case BUILT_IN_ATOMIC_OR_FETCH_8:
8045 case BUILT_IN_ATOMIC_OR_FETCH_16:
8047 enum built_in_function lib;
8048 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8049 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8050 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8051 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8052 ignore, lib);
8053 if (target)
8054 return target;
8055 break;
8057 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8058 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8059 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8060 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8061 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8062 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8063 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8064 ignore, BUILT_IN_NONE);
8065 if (target)
8066 return target;
8067 break;
8069 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8070 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8071 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8072 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8073 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8074 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8075 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8076 ignore, BUILT_IN_NONE);
8077 if (target)
8078 return target;
8079 break;
8081 case BUILT_IN_ATOMIC_FETCH_AND_1:
8082 case BUILT_IN_ATOMIC_FETCH_AND_2:
8083 case BUILT_IN_ATOMIC_FETCH_AND_4:
8084 case BUILT_IN_ATOMIC_FETCH_AND_8:
8085 case BUILT_IN_ATOMIC_FETCH_AND_16:
8086 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8087 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8088 ignore, BUILT_IN_NONE);
8089 if (target)
8090 return target;
8091 break;
8093 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8094 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8095 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8096 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8097 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8098 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8099 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8100 ignore, BUILT_IN_NONE);
8101 if (target)
8102 return target;
8103 break;
8105 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8106 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8107 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8108 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8109 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8110 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8111 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8112 ignore, BUILT_IN_NONE);
8113 if (target)
8114 return target;
8115 break;
8117 case BUILT_IN_ATOMIC_FETCH_OR_1:
8118 case BUILT_IN_ATOMIC_FETCH_OR_2:
8119 case BUILT_IN_ATOMIC_FETCH_OR_4:
8120 case BUILT_IN_ATOMIC_FETCH_OR_8:
8121 case BUILT_IN_ATOMIC_FETCH_OR_16:
8122 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8123 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8124 ignore, BUILT_IN_NONE);
8125 if (target)
8126 return target;
8127 break;
8129 case BUILT_IN_ATOMIC_TEST_AND_SET:
8130 return expand_builtin_atomic_test_and_set (exp, target);
8132 case BUILT_IN_ATOMIC_CLEAR:
8133 return expand_builtin_atomic_clear (exp);
8135 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8136 return expand_builtin_atomic_always_lock_free (exp);
8138 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8139 target = expand_builtin_atomic_is_lock_free (exp);
8140 if (target)
8141 return target;
8142 break;
8144 case BUILT_IN_ATOMIC_THREAD_FENCE:
8145 expand_builtin_atomic_thread_fence (exp);
8146 return const0_rtx;
8148 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8149 expand_builtin_atomic_signal_fence (exp);
8150 return const0_rtx;
8152 case BUILT_IN_OBJECT_SIZE:
8153 return expand_builtin_object_size (exp);
8155 case BUILT_IN_MEMCPY_CHK:
8156 case BUILT_IN_MEMPCPY_CHK:
8157 case BUILT_IN_MEMMOVE_CHK:
8158 case BUILT_IN_MEMSET_CHK:
8159 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8160 if (target)
8161 return target;
8162 break;
8164 case BUILT_IN_STRCPY_CHK:
8165 case BUILT_IN_STPCPY_CHK:
8166 case BUILT_IN_STRNCPY_CHK:
8167 case BUILT_IN_STPNCPY_CHK:
8168 case BUILT_IN_STRCAT_CHK:
8169 case BUILT_IN_STRNCAT_CHK:
8170 case BUILT_IN_SNPRINTF_CHK:
8171 case BUILT_IN_VSNPRINTF_CHK:
8172 maybe_emit_chk_warning (exp, fcode);
8173 break;
8175 case BUILT_IN_SPRINTF_CHK:
8176 case BUILT_IN_VSPRINTF_CHK:
8177 maybe_emit_sprintf_chk_warning (exp, fcode);
8178 break;
8180 case BUILT_IN_FREE:
8181 if (warn_free_nonheap_object)
8182 maybe_emit_free_warning (exp);
8183 break;
8185 case BUILT_IN_THREAD_POINTER:
8186 return expand_builtin_thread_pointer (exp, target);
8188 case BUILT_IN_SET_THREAD_POINTER:
8189 expand_builtin_set_thread_pointer (exp);
8190 return const0_rtx;
8192 case BUILT_IN_ACC_ON_DEVICE:
8193 /* Do a library call if we failed to expand the builtin when
8194 folding. */
8195 break;
8197 case BUILT_IN_GOACC_PARLEVEL_ID:
8198 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8199 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8201 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8202 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8204 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8205 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8206 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8207 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8208 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8209 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8210 return expand_speculation_safe_value (mode, exp, target, ignore);
8212 default: /* Just do a library call if this is an unknown builtin. */
8213 break;
8216 /* The switch statement above can drop through to cause the function
8217 to be called normally. */
8218 return expand_call (exp, target, ignore);
8221 /* Determine whether a tree node represents a call to a built-in
8222 function. If the tree T is a call to a built-in function with
8223 the right number of arguments of the appropriate types, return
8224 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8225 Otherwise the return value is END_BUILTINS. */
8227 enum built_in_function
8228 builtin_mathfn_code (const_tree t)
8230 const_tree fndecl, arg, parmlist;
8231 const_tree argtype, parmtype;
8232 const_call_expr_arg_iterator iter;
8234 if (TREE_CODE (t) != CALL_EXPR)
8235 return END_BUILTINS;
8237 fndecl = get_callee_fndecl (t);
8238 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8239 return END_BUILTINS;
8241 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8242 init_const_call_expr_arg_iterator (t, &iter);
8243 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8245 /* If a function doesn't take a variable number of arguments,
8246 the last element in the list will have type `void'. */
8247 parmtype = TREE_VALUE (parmlist);
8248 if (VOID_TYPE_P (parmtype))
8250 if (more_const_call_expr_args_p (&iter))
8251 return END_BUILTINS;
8252 return DECL_FUNCTION_CODE (fndecl);
8255 if (! more_const_call_expr_args_p (&iter))
8256 return END_BUILTINS;
8258 arg = next_const_call_expr_arg (&iter);
8259 argtype = TREE_TYPE (arg);
8261 if (SCALAR_FLOAT_TYPE_P (parmtype))
8263 if (! SCALAR_FLOAT_TYPE_P (argtype))
8264 return END_BUILTINS;
8266 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8268 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8269 return END_BUILTINS;
8271 else if (POINTER_TYPE_P (parmtype))
8273 if (! POINTER_TYPE_P (argtype))
8274 return END_BUILTINS;
8276 else if (INTEGRAL_TYPE_P (parmtype))
8278 if (! INTEGRAL_TYPE_P (argtype))
8279 return END_BUILTINS;
8281 else
8282 return END_BUILTINS;
8285 /* Variable-length argument list. */
8286 return DECL_FUNCTION_CODE (fndecl);
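/* E.g. given the CALL_EXPR for "sqrt (x)" with double X, this returns
   BUILT_IN_SQRT; if X were a pointer instead, the SCALAR_FLOAT check
   above would fail and END_BUILTINS would be returned.  */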
8289 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8290 evaluate to a constant. */
8292 static tree
8293 fold_builtin_constant_p (tree arg)
8295 /* We return 1 for a numeric type that's known to be a constant
8296 value at compile-time or for an aggregate type that's a
8297 literal constant. */
8298 STRIP_NOPS (arg);
8300 /* If we know this is a constant, return the constant one. */
8301 if (CONSTANT_CLASS_P (arg)
8302 || (TREE_CODE (arg) == CONSTRUCTOR
8303 && TREE_CONSTANT (arg)))
8304 return integer_one_node;
8305 if (TREE_CODE (arg) == ADDR_EXPR)
8307 tree op = TREE_OPERAND (arg, 0);
8308 if (TREE_CODE (op) == STRING_CST
8309 || (TREE_CODE (op) == ARRAY_REF
8310 && integer_zerop (TREE_OPERAND (op, 1))
8311 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8312 return integer_one_node;
8315 /* If this expression has side effects, show we don't know it to be a
8316 constant. Likewise if it's a pointer or aggregate type since in
8317 those cases we only want literals, since those are only optimized
8318 when generating RTL, not later.
8319 And finally, if we are compiling an initializer, not code, we
8320 need to return a definite result now; there's not going to be any
8321 more optimization done. */
8322 if (TREE_SIDE_EFFECTS (arg)
8323 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8324 || POINTER_TYPE_P (TREE_TYPE (arg))
8325 || cfun == 0
8326 || folding_initializer
8327 || force_folding_builtin_constant_p)
8328 return integer_zero_node;
8330 return NULL_TREE;
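/* E.g. __builtin_constant_p (3) folds to 1, and so does
   __builtin_constant_p ("x") via the ADDR_EXPR case, while a pointer
   variable folds to 0 immediately; for an integer variable we return
   NULL_TREE and keep the call until no later pass could still prove it
   constant.  */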
8333 /* Create builtin_expect or builtin_expect_with_probability
8334 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8335 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8336 argument. builtin_expect_with_probability instead uses the third argument
8337 as a PROBABILITY value. */
8339 static tree
8340 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8341 tree predictor, tree probability)
8343 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8345 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8346 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8347 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8348 ret_type = TREE_TYPE (TREE_TYPE (fn));
8349 pred_type = TREE_VALUE (arg_types);
8350 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8352 pred = fold_convert_loc (loc, pred_type, pred);
8353 expected = fold_convert_loc (loc, expected_type, expected);
8355 if (probability)
8356 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8357 else
8358 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8359 predictor);
8361 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8362 build_int_cst (ret_type, 0));
8365 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8366 NULL_TREE if no simplification is possible. */
8368 tree
8369 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8370 tree arg3)
8372 tree inner, fndecl, inner_arg0;
8373 enum tree_code code;
8375 /* Distribute the expected value over short-circuiting operators.
8376 See through the cast from truthvalue_type_node to long. */
8377 inner_arg0 = arg0;
8378 while (CONVERT_EXPR_P (inner_arg0)
8379 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8380 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8381 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8383 /* If this is a builtin_expect within a builtin_expect keep the
8384 inner one. See through a comparison against a constant. It
8385 might have been added to create a truthvalue. */
8386 inner = inner_arg0;
8388 if (COMPARISON_CLASS_P (inner)
8389 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8390 inner = TREE_OPERAND (inner, 0);
8392 if (TREE_CODE (inner) == CALL_EXPR
8393 && (fndecl = get_callee_fndecl (inner))
8394 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8395 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8396 return arg0;
8398 inner = inner_arg0;
8399 code = TREE_CODE (inner);
8400 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8402 tree op0 = TREE_OPERAND (inner, 0);
8403 tree op1 = TREE_OPERAND (inner, 1);
8404 arg1 = save_expr (arg1);
8406 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8407 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8408 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8410 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8413 /* If the argument isn't invariant then there's nothing else we can do. */
8414 if (!TREE_CONSTANT (inner_arg0))
8415 return NULL_TREE;
8417 /* If we expect that a comparison against the argument will fold to
8418 a constant, return the constant. In practice, this means a true
8419 constant or the address of a non-weak symbol. */
8420 inner = inner_arg0;
8421 STRIP_NOPS (inner);
8422 if (TREE_CODE (inner) == ADDR_EXPR)
8426 inner = TREE_OPERAND (inner, 0);
8428 while (TREE_CODE (inner) == COMPONENT_REF
8429 || TREE_CODE (inner) == ARRAY_REF);
8430 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8431 return NULL_TREE;
8434 /* Otherwise, ARG0 already has the proper type for the return value. */
8435 return arg0;
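/* E.g. __builtin_expect (a && b, 1) is distributed above into, roughly,

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that the hint survives the lowering of the short-circuit
   operator.  */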
8438 /* Fold a call to __builtin_classify_type with argument ARG. */
8440 static tree
8441 fold_builtin_classify_type (tree arg)
8443 if (arg == 0)
8444 return build_int_cst (integer_type_node, no_type_class);
8446 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8449 /* Fold a call to __builtin_strlen with argument ARG. */
8451 static tree
8452 fold_builtin_strlen (location_t loc, tree type, tree arg)
8454 if (!validate_arg (arg, POINTER_TYPE))
8455 return NULL_TREE;
8456 else
8458 tree nonstr = NULL_TREE;
8459 tree len = c_strlen (arg, 0, &nonstr);
8461 if (len)
8462 return fold_convert_loc (loc, type, len);
8464 if (!nonstr)
8465 c_strlen (arg, 1, &nonstr);
8467 if (nonstr)
8469 if (EXPR_HAS_LOCATION (arg))
8470 loc = EXPR_LOCATION (arg);
8471 else if (loc == UNKNOWN_LOCATION)
8472 loc = input_location;
8473 warn_string_no_nul (loc, "strlen", arg, nonstr);
8476 return NULL_TREE;
8480 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8482 static tree
8483 fold_builtin_inf (location_t loc, tree type, int warn)
8485 REAL_VALUE_TYPE real;
8487 /* __builtin_inff is intended to be usable to define INFINITY on all
8488 targets. If an infinity is not available, INFINITY expands "to a
8489 positive constant of type float that overflows at translation
8490 time", footnote "In this case, using INFINITY will violate the
8491 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8492 Thus we pedwarn to ensure this constraint violation is
8493 diagnosed. */
8494 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8495 pedwarn (loc, 0, "target format does not support infinity");
8497 real_inf (&real);
8498 return build_real (type, real);
8501 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8502 NULL_TREE if no simplification can be made. */
8504 static tree
8505 fold_builtin_sincos (location_t loc,
8506 tree arg0, tree arg1, tree arg2)
8508 tree type;
8509 tree fndecl, call = NULL_TREE;
8511 if (!validate_arg (arg0, REAL_TYPE)
8512 || !validate_arg (arg1, POINTER_TYPE)
8513 || !validate_arg (arg2, POINTER_TYPE))
8514 return NULL_TREE;
8516 type = TREE_TYPE (arg0);
8518 /* Calculate the result when the argument is a constant. */
8519 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8520 if (fn == END_BUILTINS)
8521 return NULL_TREE;
8523 /* Canonicalize sincos to cexpi. */
8524 if (TREE_CODE (arg0) == REAL_CST)
8526 tree complex_type = build_complex_type (type);
8527 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8529 if (!call)
8531 if (!targetm.libc_has_function (function_c99_math_complex)
8532 || !builtin_decl_implicit_p (fn))
8533 return NULL_TREE;
8534 fndecl = builtin_decl_explicit (fn);
8535 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8536 call = builtin_save_expr (call);
8539 tree ptype = build_pointer_type (type);
8540 arg1 = fold_convert (ptype, arg1);
8541 arg2 = fold_convert (ptype, arg2);
8542 return build2 (COMPOUND_EXPR, void_type_node,
8543 build2 (MODIFY_EXPR, void_type_node,
8544 build_fold_indirect_ref_loc (loc, arg1),
8545 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8546 build2 (MODIFY_EXPR, void_type_node,
8547 build_fold_indirect_ref_loc (loc, arg2),
8548 fold_build1_loc (loc, REALPART_EXPR, type, call)));
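/* Schematically, "sincos (x, &s, &c)" becomes

     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   (with TMP a save_expr when X is not a constant), assuming the C99
   complex math library is available.  */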
8551 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8552 Return NULL_TREE if no simplification can be made. */
8554 static tree
8555 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8557 if (!validate_arg (arg1, POINTER_TYPE)
8558 || !validate_arg (arg2, POINTER_TYPE)
8559 || !validate_arg (len, INTEGER_TYPE))
8560 return NULL_TREE;
8562 /* If the LEN parameter is zero, return zero. */
8563 if (integer_zerop (len))
8564 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8565 arg1, arg2);
8567 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8568 if (operand_equal_p (arg1, arg2, 0))
8569 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8571 /* If the len parameter is one, return an expression corresponding to
8572 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8573 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8575 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8576 tree cst_uchar_ptr_node
8577 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8579 tree ind1
8580 = fold_convert_loc (loc, integer_type_node,
8581 build1 (INDIRECT_REF, cst_uchar_node,
8582 fold_convert_loc (loc,
8583 cst_uchar_ptr_node,
8584 arg1)));
8585 tree ind2
8586 = fold_convert_loc (loc, integer_type_node,
8587 build1 (INDIRECT_REF, cst_uchar_node,
8588 fold_convert_loc (loc,
8589 cst_uchar_ptr_node,
8590 arg2)));
8591 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8594 return NULL_TREE;
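/* E.g. memcmp (p, q, 0) folds to 0, memcmp (p, p, n) folds to 0, and
   memcmp (p, q, 1) folds to

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   as built above (P, Q and N are illustrative names).  */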
8597 /* Fold a call to builtin isascii with argument ARG. */
8599 static tree
8600 fold_builtin_isascii (location_t loc, tree arg)
8602 if (!validate_arg (arg, INTEGER_TYPE))
8603 return NULL_TREE;
8604 else
8606 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8607 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8608 build_int_cst (integer_type_node,
8609 ~ (unsigned HOST_WIDE_INT) 0x7f));
8610 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8611 arg, integer_zero_node);
8615 /* Fold a call to builtin toascii with argument ARG. */
8617 static tree
8618 fold_builtin_toascii (location_t loc, tree arg)
8620 if (!validate_arg (arg, INTEGER_TYPE))
8621 return NULL_TREE;
8623 /* Transform toascii(c) -> (c & 0x7f). */
8624 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8625 build_int_cst (integer_type_node, 0x7f));
8628 /* Fold a call to builtin isdigit with argument ARG. */
8630 static tree
8631 fold_builtin_isdigit (location_t loc, tree arg)
8633 if (!validate_arg (arg, INTEGER_TYPE))
8634 return NULL_TREE;
8635 else
8637 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8638 /* According to the C standard, isdigit is unaffected by locale.
8639 However, it definitely is affected by the target character set. */
8640 unsigned HOST_WIDE_INT target_digit0
8641 = lang_hooks.to_target_charset ('0');
8643 if (target_digit0 == 0)
8644 return NULL_TREE;
8646 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8647 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8648 build_int_cst (unsigned_type_node, target_digit0));
8649 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8650 build_int_cst (unsigned_type_node, 9));
8654 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8656 static tree
8657 fold_builtin_fabs (location_t loc, tree arg, tree type)
8659 if (!validate_arg (arg, REAL_TYPE))
8660 return NULL_TREE;
8662 arg = fold_convert_loc (loc, type, arg);
8663 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8666 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8668 static tree
8669 fold_builtin_abs (location_t loc, tree arg, tree type)
8671 if (!validate_arg (arg, INTEGER_TYPE))
8672 return NULL_TREE;
8674 arg = fold_convert_loc (loc, type, arg);
8675 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8678 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8680 static tree
8681 fold_builtin_carg (location_t loc, tree arg, tree type)
8683 if (validate_arg (arg, COMPLEX_TYPE)
8684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8686 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8688 if (atan2_fn)
8690 tree new_arg = builtin_save_expr (arg);
8691 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8692 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8693 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8697 return NULL_TREE;
8700 /* Fold a call to builtin frexp, we can assume the base is 2. */
8702 static tree
8703 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8705 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8706 return NULL_TREE;
8708 STRIP_NOPS (arg0);
8710 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8711 return NULL_TREE;
8713 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8715 /* Proceed if a valid pointer type was passed in. */
8716 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8718 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8719 tree frac, exp;
8721 switch (value->cl)
8723 case rvc_zero:
8724 /* For +-0, return (*exp = 0, +-0). */
8725 exp = integer_zero_node;
8726 frac = arg0;
8727 break;
8728 case rvc_nan:
8729 case rvc_inf:
8730 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8731 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8732 case rvc_normal:
8734 /* Since the frexp function always expects base 2, and in
8735 GCC normalized significands are already in the range
8736 [0.5, 1.0), we have exactly what frexp wants. */
8737 REAL_VALUE_TYPE frac_rvt = *value;
8738 SET_REAL_EXP (&frac_rvt, 0);
8739 frac = build_real (rettype, frac_rvt);
8740 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8742 break;
8743 default:
8744 gcc_unreachable ();
8747 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8748 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8749 TREE_SIDE_EFFECTS (arg1) = 1;
8750 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8753 return NULL_TREE;
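/* E.g. frexp (4.0, &e) folds to (*e = 3, 0.5), since 4.0 == 0.5 * 2^3
   in GCC's normalized representation.  */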
8756 /* Fold a call to builtin modf. */
8758 static tree
8759 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8761 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8762 return NULL_TREE;
8764 STRIP_NOPS (arg0);
8766 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8767 return NULL_TREE;
8769 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8771 /* Proceed if a valid pointer type was passed in. */
8772 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8774 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8775 REAL_VALUE_TYPE trunc, frac;
8777 switch (value->cl)
8779 case rvc_nan:
8780 case rvc_zero:
8781 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8782 trunc = frac = *value;
8783 break;
8784 case rvc_inf:
8785 /* For +-Inf, return (*arg1 = arg0, +-0). */
8786 frac = dconst0;
8787 frac.sign = value->sign;
8788 trunc = *value;
8789 break;
8790 case rvc_normal:
8791 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8792 real_trunc (&trunc, VOIDmode, value);
8793 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8794 /* If the original number was negative and already
8795 integral, then the fractional part is -0.0. */
8796 if (value->sign && frac.cl == rvc_zero)
8797 frac.sign = value->sign;
8798 break;
8801 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8802 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8803 build_real (rettype, trunc));
8804 TREE_SIDE_EFFECTS (arg1) = 1;
8805 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8806 build_real (rettype, frac));
8809 return NULL_TREE;
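/* E.g. modf (2.5, &i) folds to (*i = 2.0, 0.5), while modf (-2.0, &i)
   folds to (*i = -2.0, -0.0) per the negative-and-integral case
   above.  */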
8812 /* Given a location LOC, an interclass builtin function decl FNDECL
8813 and its single argument ARG, return a folded expression computing
8814 the same, or NULL_TREE if we either couldn't or didn't want to fold
8815 (the latter happens when there's an RTL instruction available). */
8817 static tree
8818 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8820 machine_mode mode;
8822 if (!validate_arg (arg, REAL_TYPE))
8823 return NULL_TREE;
8825 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8826 return NULL_TREE;
8828 mode = TYPE_MODE (TREE_TYPE (arg));
8830 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8832 /* If there is no optab, try generic code. */
8833 switch (DECL_FUNCTION_CODE (fndecl))
8835 tree result;
8837 CASE_FLT_FN (BUILT_IN_ISINF):
8839 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8840 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8841 tree type = TREE_TYPE (arg);
8842 REAL_VALUE_TYPE r;
8843 char buf[128];
8845 if (is_ibm_extended)
8847 /* NaN and Inf are encoded in the high-order double value
8848 only. The low-order value is not significant. */
8849 type = double_type_node;
8850 mode = DFmode;
8851 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8853 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8854 real_from_string (&r, buf);
8855 result = build_call_expr (isgr_fn, 2,
8856 fold_build1_loc (loc, ABS_EXPR, type, arg),
8857 build_real (type, r));
8858 return result;
8860 CASE_FLT_FN (BUILT_IN_FINITE):
8861 case BUILT_IN_ISFINITE:
8863 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8864 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8865 tree type = TREE_TYPE (arg);
8866 REAL_VALUE_TYPE r;
8867 char buf[128];
8869 if (is_ibm_extended)
8871 /* NaN and Inf are encoded in the high-order double value
8872 only. The low-order value is not significant. */
8873 type = double_type_node;
8874 mode = DFmode;
8875 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8877 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8878 real_from_string (&r, buf);
8879 result = build_call_expr (isle_fn, 2,
8880 fold_build1_loc (loc, ABS_EXPR, type, arg),
8881 build_real (type, r));
8882 /*result = fold_build2_loc (loc, UNGT_EXPR,
8883 TREE_TYPE (TREE_TYPE (fndecl)),
8884 fold_build1_loc (loc, ABS_EXPR, type, arg),
8885 build_real (type, r));
8886 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8887 TREE_TYPE (TREE_TYPE (fndecl)),
8888 result);*/
8889 return result;
8891 case BUILT_IN_ISNORMAL:
8893 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8894 islessequal(fabs(x),DBL_MAX). */
8895 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8896 tree type = TREE_TYPE (arg);
8897 tree orig_arg, max_exp, min_exp;
8898 machine_mode orig_mode = mode;
8899 REAL_VALUE_TYPE rmax, rmin;
8900 char buf[128];
8902 orig_arg = arg = builtin_save_expr (arg);
8903 if (is_ibm_extended)
8905 /* Use double to test the normal range of IBM extended
8906 precision. Emin for IBM extended precision is
8907 different to emin for IEEE double, being 53 higher
8908 since the low double exponent is at least 53 lower
8909 than the high double exponent. */
8910 type = double_type_node;
8911 mode = DFmode;
8912 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8914 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8916 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8917 real_from_string (&rmax, buf);
8918 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8919 real_from_string (&rmin, buf);
8920 max_exp = build_real (type, rmax);
8921 min_exp = build_real (type, rmin);
8923 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8924 if (is_ibm_extended)
8926 /* Testing the high end of the range is done just using
8927 the high double, using the same test as isfinite().
8928 For the subnormal end of the range we first test the
8929 high double, then if its magnitude is equal to the
8930 limit of 0x1p-969, we test whether the low double is
8931 non-zero and opposite sign to the high double. */
8932 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8933 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8934 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8935 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8936 arg, min_exp);
8937 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8938 complex_double_type_node, orig_arg);
8939 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8940 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8941 tree zero = build_real (type, dconst0);
8942 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8943 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8944 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8945 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8946 fold_build3 (COND_EXPR,
8947 integer_type_node,
8948 hilt, logt, lolt));
8949 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8950 eq_min, ok_lo);
8951 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8952 gt_min, eq_min);
8954 else
8956 tree const isge_fn
8957 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8958 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8960 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8961 max_exp, min_exp);
8962 return result;
8964 default:
8965 break;
8968 return NULL_TREE;
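/* A sketch of the effect (assuming IEEE double and no usable optab):

     __builtin_isinf (x)

   becomes roughly

     __builtin_isgreater (__builtin_fabs (x), DBL_MAX)

   which is true only for +-Inf; a NaN operand makes the quiet
   isgreater comparison return 0 rather than raising an exception.  */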
8971 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
8972 ARG is the argument for the call. */
8974 static tree
8975 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8977 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8979 if (!validate_arg (arg, REAL_TYPE))
8980 return NULL_TREE;
8982 switch (builtin_index)
8984 case BUILT_IN_ISINF:
8985 if (!HONOR_INFINITIES (arg))
8986 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8988 return NULL_TREE;
8990 case BUILT_IN_ISINF_SIGN:
8992 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8993 /* In a boolean context, GCC will fold the inner COND_EXPR to
8994 1. So e.g. "if (isinf_sign(x))" would be folded to just
8995 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8996 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8997 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8998 tree tmp = NULL_TREE;
9000 arg = builtin_save_expr (arg);
9002 if (signbit_fn && isinf_fn)
9004 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9005 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9007 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9008 signbit_call, integer_zero_node);
9009 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9010 isinf_call, integer_zero_node);
9012 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9013 integer_minus_one_node, integer_one_node);
9014 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9015 isinf_call, tmp,
9016 integer_zero_node);
9019 return tmp;
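/* Illustrative values: with this folding, __builtin_isinf_sign applied
   to -Inf evaluates to -1, to +Inf evaluates to 1, and to any finite
   or NaN argument evaluates to 0.  */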
9022 case BUILT_IN_ISFINITE:
9023 if (!HONOR_NANS (arg)
9024 && !HONOR_INFINITIES (arg))
9025 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9027 return NULL_TREE;
9029 case BUILT_IN_ISNAN:
9030 if (!HONOR_NANS (arg))
9031 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9034 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9035 if (is_ibm_extended)
9037 /* NaN and Inf are encoded in the high-order double value
9038 only. The low-order value is not significant. */
9039 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9042 arg = builtin_save_expr (arg);
9043 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9045 default:
9046 gcc_unreachable ();
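/* A sketch of the isnan case: when NaNs are honored the call folds to
   the self-comparison

     ARG UNORDERED_EXPR ARG

   which is nonzero exactly when ARG is a NaN, the tree-level analogue
   of the classic x != x idiom.  */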
9050 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9051 This builtin will generate code to return the appropriate floating
9052 point classification depending on the value of the floating point
9053 number passed in. The possible return values must be supplied as
9054 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9055 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9056 one floating point argument which is "type generic". */
9058 static tree
9059 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9061 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9062 arg, type, res, tmp;
9063 machine_mode mode;
9064 REAL_VALUE_TYPE r;
9065 char buf[128];
9067 /* Verify the required arguments in the original call. */
9068 if (nargs != 6
9069 || !validate_arg (args[0], INTEGER_TYPE)
9070 || !validate_arg (args[1], INTEGER_TYPE)
9071 || !validate_arg (args[2], INTEGER_TYPE)
9072 || !validate_arg (args[3], INTEGER_TYPE)
9073 || !validate_arg (args[4], INTEGER_TYPE)
9074 || !validate_arg (args[5], REAL_TYPE))
9075 return NULL_TREE;
9077 fp_nan = args[0];
9078 fp_infinite = args[1];
9079 fp_normal = args[2];
9080 fp_subnormal = args[3];
9081 fp_zero = args[4];
9082 arg = args[5];
9083 type = TREE_TYPE (arg);
9084 mode = TYPE_MODE (type);
9085 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9087 /* fpclassify(x) ->
9088 isnan(x) ? FP_NAN :
9089 (fabs(x) == Inf ? FP_INFINITE :
9090 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9091 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9093 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9094 build_real (type, dconst0));
9095 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9096 tmp, fp_zero, fp_subnormal);
9098 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9099 real_from_string (&r, buf);
9100 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9101 arg, build_real (type, r));
9102 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9104 if (HONOR_INFINITIES (mode))
9106 real_inf (&r);
9107 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9108 build_real (type, r));
9109 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9110 fp_infinite, res);
9113 if (HONOR_NANS (mode))
9115 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9116 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9119 return res;
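/* Illustrative constants (assuming IEEE double, DBL_MIN == 0x1p-1022):
   an argument of 0x1p-1030 falls below the normal range, so the nested
   conditionals built above select FP_SUBNORMAL; 0.0 selects FP_ZERO
   and 1.0 selects FP_NORMAL.  */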
9122 /* Fold a call to an unordered comparison function such as
9123 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9124 being called and ARG0 and ARG1 are the arguments for the call.
9125 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9126 the opposite of the desired result. UNORDERED_CODE is used
9127 for modes that can hold NaNs and ORDERED_CODE is used for
9128 the rest. */
9130 static tree
9131 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9132 enum tree_code unordered_code,
9133 enum tree_code ordered_code)
9135 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9136 enum tree_code code;
9137 tree type0, type1;
9138 enum tree_code code0, code1;
9139 tree cmp_type = NULL_TREE;
9141 type0 = TREE_TYPE (arg0);
9142 type1 = TREE_TYPE (arg1);
9144 code0 = TREE_CODE (type0);
9145 code1 = TREE_CODE (type1);
9147 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9148 /* Choose the wider of two real types. */
9149 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9150 ? type0 : type1;
9151 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9152 cmp_type = type0;
9153 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9154 cmp_type = type1;
9156 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9157 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9159 if (unordered_code == UNORDERED_EXPR)
9161 if (!HONOR_NANS (arg0))
9162 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9163 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9166 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9167 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9168 fold_build2_loc (loc, code, type, arg0, arg1));
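/* A sketch for one case: __builtin_isgreater (x, y) in a NaN-honoring
   mode folds to

     !(x UNLE_EXPR y)

   UNLE_EXPR is "unordered or less-or-equal", so the negation is true
   exactly when both operands are ordered and x > y, matching C99
   isgreater without raising an invalid-operand exception on NaNs.  */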
9171 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9172 arithmetics if it can never overflow, or into internal functions that
9173 return both result of arithmetics and overflowed boolean flag in
9174 a complex integer result, or some other check for overflow.
9175 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9176 checking part of that. */
9178 static tree
9179 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9180 tree arg0, tree arg1, tree arg2)
9182 enum internal_fn ifn = IFN_LAST;
9183 /* The code of the expression corresponding to the type-generic
9184 built-in, or ERROR_MARK for the type-specific ones. */
9185 enum tree_code opcode = ERROR_MARK;
9186 bool ovf_only = false;
9188 switch (fcode)
9190 case BUILT_IN_ADD_OVERFLOW_P:
9191 ovf_only = true;
9192 /* FALLTHRU */
9193 case BUILT_IN_ADD_OVERFLOW:
9194 opcode = PLUS_EXPR;
9195 /* FALLTHRU */
9196 case BUILT_IN_SADD_OVERFLOW:
9197 case BUILT_IN_SADDL_OVERFLOW:
9198 case BUILT_IN_SADDLL_OVERFLOW:
9199 case BUILT_IN_UADD_OVERFLOW:
9200 case BUILT_IN_UADDL_OVERFLOW:
9201 case BUILT_IN_UADDLL_OVERFLOW:
9202 ifn = IFN_ADD_OVERFLOW;
9203 break;
9204 case BUILT_IN_SUB_OVERFLOW_P:
9205 ovf_only = true;
9206 /* FALLTHRU */
9207 case BUILT_IN_SUB_OVERFLOW:
9208 opcode = MINUS_EXPR;
9209 /* FALLTHRU */
9210 case BUILT_IN_SSUB_OVERFLOW:
9211 case BUILT_IN_SSUBL_OVERFLOW:
9212 case BUILT_IN_SSUBLL_OVERFLOW:
9213 case BUILT_IN_USUB_OVERFLOW:
9214 case BUILT_IN_USUBL_OVERFLOW:
9215 case BUILT_IN_USUBLL_OVERFLOW:
9216 ifn = IFN_SUB_OVERFLOW;
9217 break;
9218 case BUILT_IN_MUL_OVERFLOW_P:
9219 ovf_only = true;
9220 /* FALLTHRU */
9221 case BUILT_IN_MUL_OVERFLOW:
9222 opcode = MULT_EXPR;
9223 /* FALLTHRU */
9224 case BUILT_IN_SMUL_OVERFLOW:
9225 case BUILT_IN_SMULL_OVERFLOW:
9226 case BUILT_IN_SMULLL_OVERFLOW:
9227 case BUILT_IN_UMUL_OVERFLOW:
9228 case BUILT_IN_UMULL_OVERFLOW:
9229 case BUILT_IN_UMULLL_OVERFLOW:
9230 ifn = IFN_MUL_OVERFLOW;
9231 break;
9232 default:
9233 gcc_unreachable ();
9236 /* For the "generic" overloads, the first two arguments can have different
9237 types and the last argument determines the target type to use to check
9238 for overflow. The arguments of the other overloads all have the same
9239 type. */
9240 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9242 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9243 arguments are constant, attempt to fold the built-in call into a constant
9244 expression indicating whether or not it detected an overflow. */
9245 if (ovf_only
9246 && TREE_CODE (arg0) == INTEGER_CST
9247 && TREE_CODE (arg1) == INTEGER_CST)
9248 /* Perform the computation in the target type and check for overflow. */
9249 return omit_one_operand_loc (loc, boolean_type_node,
9250 arith_overflowed_p (opcode, type, arg0, arg1)
9251 ? boolean_true_node : boolean_false_node,
9252 arg2);
9254 tree ctype = build_complex_type (type);
9255 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9256 2, arg0, arg1);
9257 tree tgt = save_expr (call);
9258 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9259 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9260 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9262 if (ovf_only)
9263 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9265 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9266 tree store
9267 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9268 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
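/* A sketch of the generated tree (hypothetical variables A, B, R):

     __builtin_add_overflow (a, b, &r)

   becomes, in effect,

     tmp = IFN_ADD_OVERFLOW (a, b);    -- complex integer result
     r = REALPART_EXPR <tmp>;          -- the wrapped sum
     IMAGPART_EXPR <tmp> != 0          -- the overflow flag

   while __builtin_add_overflow_p keeps only the flag and drops the
   store.  */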
9271 /* Fold a call to __builtin_FILE to a constant string. */
9273 static inline tree
9274 fold_builtin_FILE (location_t loc)
9276 if (const char *fname = LOCATION_FILE (loc))
9278 /* The documentation says this builtin is equivalent to the preprocessor
9279 __FILE__ macro so it appears appropriate to use the same file prefix
9280 mappings. */
9281 fname = remap_macro_filename (fname);
9282 return build_string_literal (strlen (fname) + 1, fname);
9285 return build_string_literal (1, "");
9288 /* Fold a call to __builtin_FUNCTION to a constant string. */
9290 static inline tree
9291 fold_builtin_FUNCTION ()
9293 const char *name = "";
9295 if (current_function_decl)
9296 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9298 return build_string_literal (strlen (name) + 1, name);
9301 /* Fold a call to __builtin_LINE to an integer constant. */
9303 static inline tree
9304 fold_builtin_LINE (location_t loc, tree type)
9306 return build_int_cst (type, LOCATION_LINE (loc));
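/* Illustrative values (hypothetical source): inside a function f
   defined at line 42 of foo.c, __builtin_FILE (), __builtin_FUNCTION ()
   and __builtin_LINE () fold to "foo.c", "f" and 42, with the file
   name subject to the same prefix remapping as __FILE__.  */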
9309 /* Fold a call to built-in function FNDECL with 0 arguments.
9310 This function returns NULL_TREE if no simplification was possible. */
9312 static tree
9313 fold_builtin_0 (location_t loc, tree fndecl)
9315 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9316 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9317 switch (fcode)
9319 case BUILT_IN_FILE:
9320 return fold_builtin_FILE (loc);
9322 case BUILT_IN_FUNCTION:
9323 return fold_builtin_FUNCTION ();
9325 case BUILT_IN_LINE:
9326 return fold_builtin_LINE (loc, type);
9328 CASE_FLT_FN (BUILT_IN_INF):
9329 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9330 case BUILT_IN_INFD32:
9331 case BUILT_IN_INFD64:
9332 case BUILT_IN_INFD128:
9333 return fold_builtin_inf (loc, type, true);
9335 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9336 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9337 return fold_builtin_inf (loc, type, false);
9339 case BUILT_IN_CLASSIFY_TYPE:
9340 return fold_builtin_classify_type (NULL_TREE);
9342 default:
9343 break;
9345 return NULL_TREE;
9348 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9349 This function returns NULL_TREE if no simplification was possible. */
9351 static tree
9352 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9354 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9355 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9357 if (TREE_CODE (arg0) == ERROR_MARK)
9358 return NULL_TREE;
9360 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9361 return ret;
9363 switch (fcode)
9365 case BUILT_IN_CONSTANT_P:
9367 tree val = fold_builtin_constant_p (arg0);
9369 /* Gimplification will pull the CALL_EXPR for the builtin out of
9370 an if condition. When not optimizing, we'll not CSE it back.
9371 To avoid regressions such as link errors, return false now. */
9372 if (!val && !optimize)
9373 val = integer_zero_node;
9375 return val;
9378 case BUILT_IN_CLASSIFY_TYPE:
9379 return fold_builtin_classify_type (arg0);
9381 case BUILT_IN_STRLEN:
9382 return fold_builtin_strlen (loc, type, arg0);
9384 CASE_FLT_FN (BUILT_IN_FABS):
9385 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9386 case BUILT_IN_FABSD32:
9387 case BUILT_IN_FABSD64:
9388 case BUILT_IN_FABSD128:
9389 return fold_builtin_fabs (loc, arg0, type);
9391 case BUILT_IN_ABS:
9392 case BUILT_IN_LABS:
9393 case BUILT_IN_LLABS:
9394 case BUILT_IN_IMAXABS:
9395 return fold_builtin_abs (loc, arg0, type);
9397 CASE_FLT_FN (BUILT_IN_CONJ):
9398 if (validate_arg (arg0, COMPLEX_TYPE)
9399 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9400 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9401 break;
9403 CASE_FLT_FN (BUILT_IN_CREAL):
9404 if (validate_arg (arg0, COMPLEX_TYPE)
9405 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9406 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9407 break;
9409 CASE_FLT_FN (BUILT_IN_CIMAG):
9410 if (validate_arg (arg0, COMPLEX_TYPE)
9411 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9412 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9413 break;
9415 CASE_FLT_FN (BUILT_IN_CARG):
9416 return fold_builtin_carg (loc, arg0, type);
9418 case BUILT_IN_ISASCII:
9419 return fold_builtin_isascii (loc, arg0);
9421 case BUILT_IN_TOASCII:
9422 return fold_builtin_toascii (loc, arg0);
9424 case BUILT_IN_ISDIGIT:
9425 return fold_builtin_isdigit (loc, arg0);
9427 CASE_FLT_FN (BUILT_IN_FINITE):
9428 case BUILT_IN_FINITED32:
9429 case BUILT_IN_FINITED64:
9430 case BUILT_IN_FINITED128:
9431 case BUILT_IN_ISFINITE:
9433 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9434 if (ret)
9435 return ret;
9436 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9439 CASE_FLT_FN (BUILT_IN_ISINF):
9440 case BUILT_IN_ISINFD32:
9441 case BUILT_IN_ISINFD64:
9442 case BUILT_IN_ISINFD128:
9444 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9445 if (ret)
9446 return ret;
9447 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9450 case BUILT_IN_ISNORMAL:
9451 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9453 case BUILT_IN_ISINF_SIGN:
9454 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9456 CASE_FLT_FN (BUILT_IN_ISNAN):
9457 case BUILT_IN_ISNAND32:
9458 case BUILT_IN_ISNAND64:
9459 case BUILT_IN_ISNAND128:
9460 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9462 case BUILT_IN_FREE:
9463 if (integer_zerop (arg0))
9464 return build_empty_stmt (loc);
9465 break;
9467 default:
9468 break;
9471 return NULL_TREE;
9475 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9476 This function returns NULL_TREE if no simplification was possible. */
9478 static tree
9479 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9481 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9484 if (TREE_CODE (arg0) == ERROR_MARK
9485 || TREE_CODE (arg1) == ERROR_MARK)
9486 return NULL_TREE;
9488 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9489 return ret;
9491 switch (fcode)
9493 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9494 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9495 if (validate_arg (arg0, REAL_TYPE)
9496 && validate_arg (arg1, POINTER_TYPE))
9497 return do_mpfr_lgamma_r (arg0, arg1, type);
9498 break;
9500 CASE_FLT_FN (BUILT_IN_FREXP):
9501 return fold_builtin_frexp (loc, arg0, arg1, type);
9503 CASE_FLT_FN (BUILT_IN_MODF):
9504 return fold_builtin_modf (loc, arg0, arg1, type);
9506 case BUILT_IN_STRSPN:
9507 return fold_builtin_strspn (loc, arg0, arg1);
9509 case BUILT_IN_STRCSPN:
9510 return fold_builtin_strcspn (loc, arg0, arg1);
9512 case BUILT_IN_STRPBRK:
9513 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9515 case BUILT_IN_EXPECT:
9516 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9518 case BUILT_IN_ISGREATER:
9519 return fold_builtin_unordered_cmp (loc, fndecl,
9520 arg0, arg1, UNLE_EXPR, LE_EXPR);
9521 case BUILT_IN_ISGREATEREQUAL:
9522 return fold_builtin_unordered_cmp (loc, fndecl,
9523 arg0, arg1, UNLT_EXPR, LT_EXPR);
9524 case BUILT_IN_ISLESS:
9525 return fold_builtin_unordered_cmp (loc, fndecl,
9526 arg0, arg1, UNGE_EXPR, GE_EXPR);
9527 case BUILT_IN_ISLESSEQUAL:
9528 return fold_builtin_unordered_cmp (loc, fndecl,
9529 arg0, arg1, UNGT_EXPR, GT_EXPR);
9530 case BUILT_IN_ISLESSGREATER:
9531 return fold_builtin_unordered_cmp (loc, fndecl,
9532 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9533 case BUILT_IN_ISUNORDERED:
9534 return fold_builtin_unordered_cmp (loc, fndecl,
9535 arg0, arg1, UNORDERED_EXPR,
9536 NOP_EXPR);
9538 /* We do the folding for va_start in the expander. */
9539 case BUILT_IN_VA_START:
9540 break;
9542 case BUILT_IN_OBJECT_SIZE:
9543 return fold_builtin_object_size (arg0, arg1);
9545 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9546 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9548 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9549 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9551 default:
9552 break;
9554 return NULL_TREE;
9557 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9558 and ARG2.
9559 This function returns NULL_TREE if no simplification was possible. */
9561 static tree
9562 fold_builtin_3 (location_t loc, tree fndecl,
9563 tree arg0, tree arg1, tree arg2)
9565 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9566 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9568 if (TREE_CODE (arg0) == ERROR_MARK
9569 || TREE_CODE (arg1) == ERROR_MARK
9570 || TREE_CODE (arg2) == ERROR_MARK)
9571 return NULL_TREE;
9573 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9574 arg0, arg1, arg2))
9575 return ret;
9577 switch (fcode)
9580 CASE_FLT_FN (BUILT_IN_SINCOS):
9581 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9583 CASE_FLT_FN (BUILT_IN_REMQUO):
9584 if (validate_arg (arg0, REAL_TYPE)
9585 && validate_arg (arg1, REAL_TYPE)
9586 && validate_arg (arg2, POINTER_TYPE))
9587 return do_mpfr_remquo (arg0, arg1, arg2);
9588 break;
9590 case BUILT_IN_MEMCMP:
9591 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9593 case BUILT_IN_EXPECT:
9594 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9596 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9597 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9599 case BUILT_IN_ADD_OVERFLOW:
9600 case BUILT_IN_SUB_OVERFLOW:
9601 case BUILT_IN_MUL_OVERFLOW:
9602 case BUILT_IN_ADD_OVERFLOW_P:
9603 case BUILT_IN_SUB_OVERFLOW_P:
9604 case BUILT_IN_MUL_OVERFLOW_P:
9605 case BUILT_IN_SADD_OVERFLOW:
9606 case BUILT_IN_SADDL_OVERFLOW:
9607 case BUILT_IN_SADDLL_OVERFLOW:
9608 case BUILT_IN_SSUB_OVERFLOW:
9609 case BUILT_IN_SSUBL_OVERFLOW:
9610 case BUILT_IN_SSUBLL_OVERFLOW:
9611 case BUILT_IN_SMUL_OVERFLOW:
9612 case BUILT_IN_SMULL_OVERFLOW:
9613 case BUILT_IN_SMULLL_OVERFLOW:
9614 case BUILT_IN_UADD_OVERFLOW:
9615 case BUILT_IN_UADDL_OVERFLOW:
9616 case BUILT_IN_UADDLL_OVERFLOW:
9617 case BUILT_IN_USUB_OVERFLOW:
9618 case BUILT_IN_USUBL_OVERFLOW:
9619 case BUILT_IN_USUBLL_OVERFLOW:
9620 case BUILT_IN_UMUL_OVERFLOW:
9621 case BUILT_IN_UMULL_OVERFLOW:
9622 case BUILT_IN_UMULLL_OVERFLOW:
9623 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9625 default:
9626 break;
9628 return NULL_TREE;
9631 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9632 arguments. IGNORE is true if the result of the
9633 function call is ignored. This function returns NULL_TREE if no
9634 simplification was possible. */
9636 tree
9637 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9639 tree ret = NULL_TREE;
9641 switch (nargs)
9643 case 0:
9644 ret = fold_builtin_0 (loc, fndecl);
9645 break;
9646 case 1:
9647 ret = fold_builtin_1 (loc, fndecl, args[0]);
9648 break;
9649 case 2:
9650 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9651 break;
9652 case 3:
9653 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9654 break;
9655 default:
9656 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9657 break;
9659 if (ret)
9661 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9662 SET_EXPR_LOCATION (ret, loc);
9663 return ret;
9665 return NULL_TREE;
9668 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9669 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9670 of arguments in ARGS to be omitted. OLDNARGS is the number of
9671 elements in ARGS. */
9673 static tree
9674 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9675 int skip, tree fndecl, int n, va_list newargs)
9677 int nargs = oldnargs - skip + n;
9678 tree *buffer;
9680 if (n > 0)
9682 int i, j;
9684 buffer = XALLOCAVEC (tree, nargs);
9685 for (i = 0; i < n; i++)
9686 buffer[i] = va_arg (newargs, tree);
9687 for (j = skip; j < oldnargs; j++, i++)
9688 buffer[i] = args[j];
9690 else
9691 buffer = args + skip;
9693 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9696 /* Return true if FNDECL shouldn't be folded right now.
9697 If a built-in function has an inline wrapper declared
9698 always_inline, defer folding it until after always_inline
9699 functions have been inlined; otherwise e.g. -D_FORTIFY_SOURCE
9700 checking might not be performed. */
9702 bool
9703 avoid_folding_inline_builtin (tree fndecl)
9705 return (DECL_DECLARED_INLINE_P (fndecl)
9706 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9707 && cfun
9708 && !cfun->always_inline_functions_inlined
9709 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9712 /* A wrapper function for builtin folding that prevents warnings for
9713 "statement without effect" and the like, caused by removing the
9714 call node earlier than the warning is generated. */
9716 tree
9717 fold_call_expr (location_t loc, tree exp, bool ignore)
9719 tree ret = NULL_TREE;
9720 tree fndecl = get_callee_fndecl (exp);
9721 if (fndecl && fndecl_built_in_p (fndecl)
9722 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9723 yet. Defer folding until we see all the arguments
9724 (after inlining). */
9725 && !CALL_EXPR_VA_ARG_PACK (exp))
9727 int nargs = call_expr_nargs (exp);
9729 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9730 instead last argument is __builtin_va_arg_pack (). Defer folding
9731 even in that case, until arguments are finalized. */
9732 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9734 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9735 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9736 return NULL_TREE;
9739 if (avoid_folding_inline_builtin (fndecl))
9740 return NULL_TREE;
9742 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9743 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9744 CALL_EXPR_ARGP (exp), ignore);
9745 else
9747 tree *args = CALL_EXPR_ARGP (exp);
9748 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9749 if (ret)
9750 return ret;
9753 return NULL_TREE;
9756 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9757 N arguments are passed in the array ARGARRAY. Return a folded
9758 expression or NULL_TREE if no simplification was possible. */
9760 tree
9761 fold_builtin_call_array (location_t loc, tree,
9762 tree fn,
9763 int n,
9764 tree *argarray)
9766 if (TREE_CODE (fn) != ADDR_EXPR)
9767 return NULL_TREE;
9769 tree fndecl = TREE_OPERAND (fn, 0);
9770 if (TREE_CODE (fndecl) == FUNCTION_DECL
9771 && fndecl_built_in_p (fndecl))
9773 /* If last argument is __builtin_va_arg_pack (), arguments to this
9774 function are not finalized yet. Defer folding until they are. */
9775 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9777 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9778 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9779 return NULL_TREE;
9781 if (avoid_folding_inline_builtin (fndecl))
9782 return NULL_TREE;
9783 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9784 return targetm.fold_builtin (fndecl, n, argarray, false);
9785 else
9786 return fold_builtin_n (loc, fndecl, argarray, n, false);
9789 return NULL_TREE;
9792 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9793 along with N new arguments specified as the "..." parameters. SKIP
9794 is the number of arguments in EXP to be omitted. This function is used
9795 to do varargs-to-varargs transformations. */
9797 static tree
9798 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9800 va_list ap;
9801 tree t;
9803 va_start (ap, n);
9804 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9805 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9806 va_end (ap);
9808 return t;
9811 /* Validate a single argument ARG against a tree code CODE representing
9812 a type. Return true when argument is valid. */
9814 static bool
9815 validate_arg (const_tree arg, enum tree_code code)
9817 if (!arg)
9818 return false;
9819 else if (code == POINTER_TYPE)
9820 return POINTER_TYPE_P (TREE_TYPE (arg));
9821 else if (code == INTEGER_TYPE)
9822 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9823 return code == TREE_CODE (TREE_TYPE (arg));
9826 /* This function validates the types of a function call argument list
9827 against a specified list of tree_codes. If the last specifier is a 0,
9828 that represents an ellipsis; otherwise the last specifier must be a
9829 VOID_TYPE.
9831 This is the GIMPLE version of validate_arglist. Eventually we want to
9832 completely convert builtins.c to work from GIMPLEs and the tree based
9833 validate_arglist will then be removed. */
9835 bool
9836 validate_gimple_arglist (const gcall *call, ...)
9838 enum tree_code code;
9839 bool res = false;
9840 va_list ap;
9841 const_tree arg;
9842 size_t i;
9844 va_start (ap, call);
9845 i = 0;
9847 do
9849 code = (enum tree_code) va_arg (ap, int);
9850 switch (code)
9852 case 0:
9853 /* This signifies an ellipsis; any further arguments are all ok. */
9854 res = true;
9855 goto end;
9856 case VOID_TYPE:
9857 /* This signifies an endlink; if no arguments remain, return
9858 true, otherwise return false. */
9859 res = (i == gimple_call_num_args (call));
9860 goto end;
9861 default:
9862 /* If no parameters remain or the parameter's code does not
9863 match the specified code, return false. Otherwise continue
9864 checking any remaining arguments. */
9865 arg = gimple_call_arg (call, i++);
9866 if (!validate_arg (arg, code))
9867 goto end;
9868 break;
9871 while (1);
9873 /* We need gotos here since all exits must funnel through the
9874 single va_end call. */
9875 end: ;
9876 va_end (ap);
9878 return res;
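/* Illustrative usage (hypothetical caller): a builtin taking a double
   and a pointer would be checked with

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE demands that no further arguments
   remain; passing 0 instead would accept any trailing arguments.  */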
9881 /* Default target-specific builtin expander that does nothing. */
9883 rtx
9884 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9885 rtx target ATTRIBUTE_UNUSED,
9886 rtx subtarget ATTRIBUTE_UNUSED,
9887 machine_mode mode ATTRIBUTE_UNUSED,
9888 int ignore ATTRIBUTE_UNUSED)
9890 return NULL_RTX;
9893 /* Returns true if EXP represents data that would potentially reside
9894 in a readonly section. */
9896 bool
9897 readonly_data_expr (tree exp)
9899 STRIP_NOPS (exp);
9901 if (TREE_CODE (exp) != ADDR_EXPR)
9902 return false;
9904 exp = get_base_address (TREE_OPERAND (exp, 0));
9905 if (!exp)
9906 return false;
9908 /* Make sure we call decl_readonly_section only for trees it
9909 can handle (since it returns true for everything it doesn't
9910 understand). */
9911 if (TREE_CODE (exp) == STRING_CST
9912 || TREE_CODE (exp) == CONSTRUCTOR
9913 || (VAR_P (exp) && TREE_STATIC (exp)))
9914 return decl_readonly_section (exp, 0);
9915 else
9916 return false;
9919 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9920 to the call, and TYPE is its return type.
9922 Return NULL_TREE if no simplification was possible, otherwise return the
9923 simplified form of the call as a tree.
9925 The simplified form may be a constant or other expression which
9926 computes the same value, but in a more efficient manner (including
9927 calls to other builtin functions).
9929 The call may contain arguments which need to be evaluated, but
9930 which are not useful to determine the result of the call. In
9931 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9932 COMPOUND_EXPR will be an argument which must be evaluated.
9933 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9934 COMPOUND_EXPR in the chain will contain the tree for the simplified
9935 form of the builtin function call. */
9937 static tree
9938 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9940 if (!validate_arg (s1, POINTER_TYPE)
9941 || !validate_arg (s2, POINTER_TYPE))
9942 return NULL_TREE;
9943 else
9945 tree fn;
9946 const char *p1, *p2;
9948 p2 = c_getstr (s2);
9949 if (p2 == NULL)
9950 return NULL_TREE;
9952 p1 = c_getstr (s1);
9953 if (p1 != NULL)
9955 const char *r = strpbrk (p1, p2);
9956 tree tem;
9958 if (r == NULL)
9959 return build_int_cst (TREE_TYPE (s1), 0);
9961 /* Return an offset into the constant string argument. */
9962 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9963 return fold_convert_loc (loc, type, tem);
9966 if (p2[0] == '\0')
9967 /* strpbrk(x, "") == NULL.
9968 Evaluate and ignore s1 in case it had side-effects. */
9969 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9971 if (p2[1] != '\0')
9972 return NULL_TREE; /* Really call strpbrk. */
9974 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9975 if (!fn)
9976 return NULL_TREE;
9978 /* New argument list transforming strpbrk(s1, s2) to
9979 strchr(s1, s2[0]). */
9980 return build_call_expr_loc (loc, fn, 2, s1,
9981 build_int_cst (integer_type_node, p2[0]));
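/* Illustrative results: strpbrk (s, "x") searches for a single
   character, so it is rewritten as strchr (s, 'x'); with both strings
   constant, strpbrk ("hello", "lo") folds to "hello" + 2.  */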
9985 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9986 to the call.
9988 Return NULL_TREE if no simplification was possible, otherwise return the
9989 simplified form of the call as a tree.
9991 The simplified form may be a constant or other expression which
9992 computes the same value, but in a more efficient manner (including
9993 calls to other builtin functions).
9995 The call may contain arguments which need to be evaluated, but
9996 which are not useful to determine the result of the call. In
9997 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9998 COMPOUND_EXPR will be an argument which must be evaluated.
9999 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10000 COMPOUND_EXPR in the chain will contain the tree for the simplified
10001 form of the builtin function call. */
10003 static tree
10004 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10006 if (!validate_arg (s1, POINTER_TYPE)
10007 || !validate_arg (s2, POINTER_TYPE))
10008 return NULL_TREE;
10009 else
10011 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10013 /* If either argument is "", the result is 0. */
10014 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10015 /* Evaluate and ignore both arguments in case either one has
10016 side-effects. */
10017 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10018 s1, s2);
10019 return NULL_TREE;
10023 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10024 to the call.
10026 Return NULL_TREE if no simplification was possible, otherwise return the
10027 simplified form of the call as a tree.
10029 The simplified form may be a constant or other expression which
10030 computes the same value, but in a more efficient manner (including
10031 calls to other builtin functions).
10033 The call may contain arguments which need to be evaluated, but
10034 which are not useful to determine the result of the call. In
10035 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10036 COMPOUND_EXPR will be an argument which must be evaluated.
10037 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10038 COMPOUND_EXPR in the chain will contain the tree for the simplified
10039 form of the builtin function call. */
10041 static tree
10042 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10044 if (!validate_arg (s1, POINTER_TYPE)
10045 || !validate_arg (s2, POINTER_TYPE))
10046 return NULL_TREE;
10047 else
10049 /* If the first argument is "", the result is 0. */
10050 const char *p1 = c_getstr (s1);
10051 if (p1 && *p1 == '\0')
10053 /* Evaluate and ignore argument s2 in case it has
10054 side-effects. */
10055 return omit_one_operand_loc (loc, size_type_node,
10056 size_zero_node, s2);
10059 /* If the second argument is "", return __builtin_strlen(s1). */
10060 const char *p2 = c_getstr (s2);
10061 if (p2 && *p2 == '\0')
10063 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10065 /* If the replacement _DECL isn't initialized, don't do the
10066 transformation. */
10067 if (!fn)
10068 return NULL_TREE;
10070 return build_call_expr_loc (loc, fn, 1, s1);
10072 return NULL_TREE;
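/* Illustrative results: strcspn ("", reject) folds to 0 while still
   evaluating REJECT for side effects, and strcspn (s, "") folds to
   strlen (s), since an empty reject set never ends the segment.  */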
10076 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10077 produced. False otherwise. This is done so that we don't output the error
10078 or warning twice or three times. */
10080 bool
10081 fold_builtin_next_arg (tree exp, bool va_start_p)
10083 tree fntype = TREE_TYPE (current_function_decl);
10084 int nargs = call_expr_nargs (exp);
10085 tree arg;
10086 /* There is a good chance the current input_location points inside the
10087 definition of the va_start macro (perhaps on the token for the
10088 builtin) in a system header, so warnings would not be emitted.
10089 Use the location in real source code instead. */
10090 source_location current_location =
10091 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10092 NULL);
10094 if (!stdarg_p (fntype))
10096 error ("%<va_start%> used in function with fixed args");
10097 return true;
10100 if (va_start_p)
10102 if (nargs != 2)
10104 error ("wrong number of arguments to function %<va_start%>");
10105 return true;
10107 arg = CALL_EXPR_ARG (exp, 1);
10109 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10110 when we checked the arguments and if needed issued a warning. */
10111 else
10113 if (nargs == 0)
10115 /* Evidently an out of date version of <stdarg.h>; can't validate
10116 va_start's second argument, but can still work as intended. */
10117 warning_at (current_location,
10118 OPT_Wvarargs,
10119 "%<__builtin_next_arg%> called without an argument");
10120 return true;
10122 else if (nargs > 1)
10124 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10125 return true;
10127 arg = CALL_EXPR_ARG (exp, 0);
10130 if (TREE_CODE (arg) == SSA_NAME)
10131 arg = SSA_NAME_VAR (arg);
10133 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10134 or __builtin_next_arg (0) the first time we see it, after checking
10135 the arguments and if needed issuing a warning. */
10136 if (!integer_zerop (arg))
10138 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10140 /* Strip off all nops for the sake of the comparison. This
10141 is not quite the same as STRIP_NOPS. It does more.
10142 We must also strip off INDIRECT_EXPR for C++ reference
10143 parameters. */
10144 while (CONVERT_EXPR_P (arg)
10145 || TREE_CODE (arg) == INDIRECT_REF)
10146 arg = TREE_OPERAND (arg, 0);
10147 if (arg != last_parm)
10149 /* FIXME: Sometimes with the tree optimizers we can end up with
10150 something other than the last argument even though the user
10151 used the last argument. We just warn and proceed, so we
10152 may generate wrong code because of
10153 it. */
10154 warning_at (current_location,
10155 OPT_Wvarargs,
10156 "second parameter of %<va_start%> not last named argument");
10159 /* Undefined by C99 7.15.1.4p4 (va_start):
10160 "If the parameter parmN is declared with the register storage
10161 class, with a function or array type, or with a type that is
10162 not compatible with the type that results after application of
10163 the default argument promotions, the behavior is undefined."
10164 */
10165 else if (DECL_REGISTER (arg))
10167 warning_at (current_location,
10168 OPT_Wvarargs,
10169 "undefined behavior when second parameter of "
10170 "%<va_start%> is declared with %<register%> storage");
10173 /* We want to verify the second parameter just once before the tree
10174 optimizers are run and then avoid keeping it in the tree,
10175 as otherwise we could warn even for correct code like:
10176 void foo (int i, ...)
10177 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10178 if (va_start_p)
10179 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10180 else
10181 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10183 return false;
10187 /* Expand a call EXP to __builtin_object_size. */
10189 static rtx
10190 expand_builtin_object_size (tree exp)
10192 tree ost;
10193 int object_size_type;
10194 tree fndecl = get_callee_fndecl (exp);
10196 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10198 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10199 exp, fndecl);
10200 expand_builtin_trap ();
10201 return const0_rtx;
10204 ost = CALL_EXPR_ARG (exp, 1);
10205 STRIP_NOPS (ost);
10207 if (TREE_CODE (ost) != INTEGER_CST
10208 || tree_int_cst_sgn (ost) < 0
10209 || compare_tree_int (ost, 3) > 0)
10211 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10212 exp, fndecl);
10213 expand_builtin_trap ();
10214 return const0_rtx;
10217 object_size_type = tree_to_shwi (ost);
10219 return object_size_type < 2 ? constm1_rtx : const0_rtx;
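/* Illustrative defaults: once this point is reached the size is
   unknown, so __builtin_object_size (p, 0) and (p, 1) expand to
   (size_t) -1 while types 2 and 3 expand to 0.  */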
10222 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10223 FCODE is the BUILT_IN_* to use.
10224 Return NULL_RTX if we failed; the caller should emit a normal call,
10225 otherwise try to get the result in TARGET, if convenient (and in
10226 mode MODE if that's convenient). */
10228 static rtx
10229 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10230 enum built_in_function fcode)
10232 if (!validate_arglist (exp,
10233 POINTER_TYPE,
10234 fcode == BUILT_IN_MEMSET_CHK
10235 ? INTEGER_TYPE : POINTER_TYPE,
10236 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10237 return NULL_RTX;
10239 tree dest = CALL_EXPR_ARG (exp, 0);
10240 tree src = CALL_EXPR_ARG (exp, 1);
10241 tree len = CALL_EXPR_ARG (exp, 2);
10242 tree size = CALL_EXPR_ARG (exp, 3);
10244 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10245 /*str=*/NULL_TREE, size);
10247 if (!tree_fits_uhwi_p (size))
10248 return NULL_RTX;
10250 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10252 /* Avoid transforming the checking call to an ordinary one when
10253 an overflow has been detected or when the call couldn't be
10254 validated because the size is not constant. */
10255 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10256 return NULL_RTX;
10258 tree fn = NULL_TREE;
10259 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10260 mem{cpy,pcpy,move,set} is available. */
10261 switch (fcode)
10263 case BUILT_IN_MEMCPY_CHK:
10264 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10265 break;
10266 case BUILT_IN_MEMPCPY_CHK:
10267 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10268 break;
10269 case BUILT_IN_MEMMOVE_CHK:
10270 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10271 break;
10272 case BUILT_IN_MEMSET_CHK:
10273 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10274 break;
10275 default:
10276 break;
10279 if (! fn)
10280 return NULL_RTX;
10282 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10283 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10284 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10285 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10287 else if (fcode == BUILT_IN_MEMSET_CHK)
10288 return NULL_RTX;
10289 else
10291 unsigned int dest_align = get_pointer_alignment (dest);
10293 /* If DEST is not a pointer type, call the normal function. */
10294 if (dest_align == 0)
10295 return NULL_RTX;
10297 /* If SRC and DEST are the same (and not volatile), do nothing. */
10298 if (operand_equal_p (src, dest, 0))
10300 tree expr;
10302 if (fcode != BUILT_IN_MEMPCPY_CHK)
10304 /* Evaluate and ignore LEN in case it has side-effects. */
10305 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10306 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10309 expr = fold_build_pointer_plus (dest, len);
10310 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10313 /* __memmove_chk special case. */
10314 if (fcode == BUILT_IN_MEMMOVE_CHK)
10316 unsigned int src_align = get_pointer_alignment (src);
10318 if (src_align == 0)
10319 return NULL_RTX;
10321 /* If src is categorized for a readonly section we can use
10322 normal __memcpy_chk. */
10323 if (readonly_data_expr (src))
10325 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10326 if (!fn)
10327 return NULL_RTX;
10328 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10329 dest, src, len, size);
10330 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10331 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10332 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10335 return NULL_RTX;
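/* A sketch of the common case (hypothetical operands): for

     __builtin___memcpy_chk (d, s, 16, bos)

   where BOS is a known constant of at least 16, the checking call is
   rewritten as plain memcpy (d, s, 16); when neither the length nor
   the object size pins the result down, NULL_RTX is returned and the
   runtime __memcpy_chk call remains.  */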
10339 /* Emit warning if a buffer overflow is detected at compile time. */
10341 static void
10342 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10344 /* The source string. */
10345 tree srcstr = NULL_TREE;
10346 /* The size of the destination object. */
10347 tree objsize = NULL_TREE;
10348 /* The string that is being concatenated with (as in __strcat_chk)
10349 or null if it isn't. */
10350 tree catstr = NULL_TREE;
10351 /* The maximum length of the source sequence in a bounded operation
10352 (such as __strncat_chk) or null if the operation isn't bounded
10353 (such as __strcat_chk). */
10354 tree maxread = NULL_TREE;
10355 /* The exact size of the access (such as in __strncpy_chk). */
10356 tree size = NULL_TREE;
10358 switch (fcode)
10360 case BUILT_IN_STRCPY_CHK:
10361 case BUILT_IN_STPCPY_CHK:
10362 srcstr = CALL_EXPR_ARG (exp, 1);
10363 objsize = CALL_EXPR_ARG (exp, 2);
10364 break;
10366 case BUILT_IN_STRCAT_CHK:
10367 /* For __strcat_chk the warning will be emitted only if overflowing
10368 by at least strlen (dest) + 1 bytes. */
10369 catstr = CALL_EXPR_ARG (exp, 0);
10370 srcstr = CALL_EXPR_ARG (exp, 1);
10371 objsize = CALL_EXPR_ARG (exp, 2);
10372 break;
10374 case BUILT_IN_STRNCAT_CHK:
10375 catstr = CALL_EXPR_ARG (exp, 0);
10376 srcstr = CALL_EXPR_ARG (exp, 1);
10377 maxread = CALL_EXPR_ARG (exp, 2);
10378 objsize = CALL_EXPR_ARG (exp, 3);
10379 break;
10381 case BUILT_IN_STRNCPY_CHK:
10382 case BUILT_IN_STPNCPY_CHK:
10383 srcstr = CALL_EXPR_ARG (exp, 1);
10384 size = CALL_EXPR_ARG (exp, 2);
10385 objsize = CALL_EXPR_ARG (exp, 3);
10386 break;
10388 case BUILT_IN_SNPRINTF_CHK:
10389 case BUILT_IN_VSNPRINTF_CHK:
10390 maxread = CALL_EXPR_ARG (exp, 1);
10391 objsize = CALL_EXPR_ARG (exp, 3);
10392 break;
10393 default:
10394 gcc_unreachable ();
10397 if (catstr && maxread)
10399 /* Check __strncat_chk. There is no way to determine the length
10400 of the string to which the source string is being appended so
10401 just warn when the length of the source string is not known. */
10402 check_strncat_sizes (exp, objsize);
10403 return;
10406 /* The destination argument is the first one for all built-ins above. */
10407 tree dst = CALL_EXPR_ARG (exp, 0);
10409 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10412 /* Emit warning if a buffer overflow is detected at compile time
10413 in __sprintf_chk/__vsprintf_chk calls. */
10415 static void
10416 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10418 tree size, len, fmt;
10419 const char *fmt_str;
10420 int nargs = call_expr_nargs (exp);
10422 /* Verify the required arguments in the original call. */
10424 if (nargs < 4)
10425 return;
10426 size = CALL_EXPR_ARG (exp, 2);
10427 fmt = CALL_EXPR_ARG (exp, 3);
10429 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10430 return;
10432 /* Check whether the format is a literal string constant. */
10433 fmt_str = c_getstr (fmt);
10434 if (fmt_str == NULL)
10435 return;
10437 if (!init_target_chars ())
10438 return;
10440 /* If the format doesn't contain % args or %%, we know its size. */
10441 if (strchr (fmt_str, target_percent) == 0)
10442 len = build_int_cstu (size_type_node, strlen (fmt_str));
10443 /* If the format is "%s" and the first ... argument is a string literal,
10444 we know it too. */
10445 else if (fcode == BUILT_IN_SPRINTF_CHK
10446 && strcmp (fmt_str, target_percent_s) == 0)
10448 tree arg;
10450 if (nargs < 5)
10451 return;
10452 arg = CALL_EXPR_ARG (exp, 4);
10453 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10454 return;
10456 len = c_strlen (arg, 1);
10457 if (!len || ! tree_fits_uhwi_p (len))
10458 return;
10460 else
10461 return;
10463 /* Add one for the terminating nul. */
10464 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10466 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10467 /*maxread=*/NULL_TREE, len, size);
10470 /* Emit a warning if free is called with the address of a variable. */
10472 static void
10473 maybe_emit_free_warning (tree exp)
10475 tree arg = CALL_EXPR_ARG (exp, 0);
10477 STRIP_NOPS (arg);
10478 if (TREE_CODE (arg) != ADDR_EXPR)
10479 return;
10481 arg = get_base_address (TREE_OPERAND (arg, 0));
10482 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10483 return;
10485 if (SSA_VAR_P (arg))
10486 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10487 "%Kattempt to free a non-heap object %qD", exp, arg);
10488 else
10489 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10490 "%Kattempt to free a non-heap object", exp);
10493 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10494 if possible. */
10496 static tree
10497 fold_builtin_object_size (tree ptr, tree ost)
10499 unsigned HOST_WIDE_INT bytes;
10500 int object_size_type;
10502 if (!validate_arg (ptr, POINTER_TYPE)
10503 || !validate_arg (ost, INTEGER_TYPE))
10504 return NULL_TREE;
10506 STRIP_NOPS (ost);
10508 if (TREE_CODE (ost) != INTEGER_CST
10509 || tree_int_cst_sgn (ost) < 0
10510 || compare_tree_int (ost, 3) > 0)
10511 return NULL_TREE;
10513 object_size_type = tree_to_shwi (ost);
10515 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10516 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10517 and (size_t) 0 for types 2 and 3. */
10518 if (TREE_SIDE_EFFECTS (ptr))
10519 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10521 if (TREE_CODE (ptr) == ADDR_EXPR)
10523 compute_builtin_object_size (ptr, object_size_type, &bytes);
10524 if (wi::fits_to_tree_p (bytes, size_type_node))
10525 return build_int_cstu (size_type_node, bytes);
10527 else if (TREE_CODE (ptr) == SSA_NAME)
10529 /* If object size is not known yet, delay folding until
10530 later. Maybe subsequent passes will help determining
10531 it. */
10532 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10533 && wi::fits_to_tree_p (bytes, size_type_node))
10534 return build_int_cstu (size_type_node, bytes);
10537 return NULL_TREE;
10540 /* Builtins with folding operations that operate on "..." arguments
10541 need special handling; we need to store the arguments in a convenient
10542 data structure before attempting any folding. Fortunately there are
10543 only a few builtins that fall into this category. FNDECL is the
10544 function, EXP is the CALL_EXPR for the call. */
10546 static tree
10547 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10549 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10550 tree ret = NULL_TREE;
10552 switch (fcode)
10554 case BUILT_IN_FPCLASSIFY:
10555 ret = fold_builtin_fpclassify (loc, args, nargs);
10556 break;
10558 default:
10559 break;
10561 if (ret)
10563 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10564 SET_EXPR_LOCATION (ret, loc);
10565 TREE_NO_WARNING (ret) = 1;
10566 return ret;
10568 return NULL_TREE;
10571 /* Initialize format string characters in the target charset. */
10573 bool
10574 init_target_chars (void)
10576 static bool init;
10577 if (!init)
10579 target_newline = lang_hooks.to_target_charset ('\n');
10580 target_percent = lang_hooks.to_target_charset ('%');
10581 target_c = lang_hooks.to_target_charset ('c');
10582 target_s = lang_hooks.to_target_charset ('s');
10583 if (target_newline == 0 || target_percent == 0 || target_c == 0
10584 || target_s == 0)
10585 return false;
10587 target_percent_c[0] = target_percent;
10588 target_percent_c[1] = target_c;
10589 target_percent_c[2] = '\0';
10591 target_percent_s[0] = target_percent;
10592 target_percent_s[1] = target_s;
10593 target_percent_s[2] = '\0';
10595 target_percent_s_newline[0] = target_percent;
10596 target_percent_s_newline[1] = target_s;
10597 target_percent_s_newline[2] = target_newline;
10598 target_percent_s_newline[3] = '\0';
10600 init = true;
10602 return true;
10605 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10606 and no overflow/underflow occurred. INEXACT is true if M was not
10607 exactly calculated. TYPE is the tree type for the result. This
10608 function assumes that you cleared the MPFR flags and then
10609 calculated M to see if anything subsequently set a flag prior to
10610 entering this function. Return NULL_TREE if any checks fail. */
10612 static tree
10613 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10615 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10616 overflow/underflow occurred. If -frounding-math, proceed iff the
10617 result of calling FUNC was exact. */
10618 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10619 && (!flag_rounding_math || !inexact))
10621 REAL_VALUE_TYPE rr;
10623 real_from_mpfr (&rr, m, type, GMP_RNDN);
10624 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10625 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10626 but the mpfr_t is not, then we underflowed in the
10627 conversion. */
10628 if (real_isfinite (&rr)
10629 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10631 REAL_VALUE_TYPE rmode;
10633 real_convert (&rmode, TYPE_MODE (type), &rr);
10634 /* Proceed iff the specified mode can hold the value. */
10635 if (real_identical (&rmode, &rr))
10636 return build_real (type, rmode);
10639 return NULL_TREE;
10642 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10643 number and no overflow/underflow occurred. INEXACT is true if M
10644 was not exactly calculated. TYPE is the tree type for the result.
10645 This function assumes that you cleared the MPFR flags and then
10646 calculated M to see if anything subsequently set a flag prior to
10647 entering this function. Return NULL_TREE if any checks fail, if
10648 FORCE_CONVERT is true, then bypass the checks. */
10650 static tree
10651 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10653 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10654 overflow/underflow occurred. If -frounding-math, proceed iff the
10655 result of calling FUNC was exact. */
10656 if (force_convert
10657 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10658 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10659 && (!flag_rounding_math || !inexact)))
10661 REAL_VALUE_TYPE re, im;
10663 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10664 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10665 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10666 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10667 but the mpfr_t is not, then we underflowed in the
10668 conversion. */
10669 if (force_convert
10670 || (real_isfinite (&re) && real_isfinite (&im)
10671 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10672 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10674 REAL_VALUE_TYPE re_mode, im_mode;
10676 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10677 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10678 /* Proceed iff the specified mode can hold the value. */
10679 if (force_convert
10680 || (real_identical (&re_mode, &re)
10681 && real_identical (&im_mode, &im)))
10682 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10683 build_real (TREE_TYPE (type), im_mode));
10686 return NULL_TREE;
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to
   store the partial quotient through ARG_QUO and return the remainder
   as the result.  The type is taken from the type of ARG0 and is used
   for setting the precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
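/* Worked example of the fold above (illustrative only): with constant
   arguments,

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   has 5.0/3.0 rounding to the integer n = 2, so the remainder is
   5.0 - 2*3.0 = -1.0 and Q receives (the low bits of) 2; the call
   folds to the equivalent of ((q = 2), -1.0), a COMPOUND_EXPR of the
   quotient store and the constant remainder.  */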
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
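/* Worked example of the fold above (illustrative only): for

     int sg;
     double r = __builtin_lgamma_r (0.5, &sg);

   Gamma(0.5) = sqrt(pi) ~= 1.77245 is positive, so SG is set to 1 and
   R to log(sqrt(pi)) ~= 0.57236; the call folds to the equivalent of
   ((sg = 1), 0.57236...), a COMPOUND_EXPR of the signgam store and
   the constant log-gamma value.  */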
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
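/* A minimal usage sketch (hypothetical caller fragment; ARG0, ARG1
   and TYPE are assumed to be set up as described above): folding a
   two-argument complex builtin such as cpow amounts to passing the
   matching MPC routine as FUNC.  */
#if 0
  tree folded = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0,
			     mpc_pow);
#endif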
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
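/* A minimal usage sketch (hypothetical pass fragment; GSI is assumed
   to be a gimple_stmt_iterator positioned at a statement):  */
#if 0
  gcall *call = dyn_cast <gcall *> (gsi_stmt (gsi));
  if (call)
    {
      tree folded = fold_call_stmt (call, /*ignore=*/ false);
      if (folded)
	{
	  /* The caller would now substitute FOLDED for the call's
	     result using its usual statement-replacement machinery.  */
	}
    }
#endif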
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
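/* For illustration: this path is what lets a user-level renaming such
   as

     extern int ffs (int) __asm__ ("__my_ffs");

   redirect both direct calls to the ffs builtin and, on targets where
   int is narrower than a word, the ffs libcalls emitted through the
   optab machinery to the "__my_ffs" symbol.  */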
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
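/* A minimal usage sketch (hypothetical inlining-style heuristic;
   STMT and COST are assumptions): calls to inexpensive builtins can
   be costed like a single simple statement rather than a real call.  */
#if 0
  tree fndecl = gimple_call_fndecl (stmt);
  if (fndecl && is_inexpensive_builtin (fndecl))
    cost = 1;	/* Treat like one simple instruction, not a call.  */
#endif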
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
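/* A minimal usage sketch (hypothetical folder fragment; ARG1, the
   host-side string HOST_STRING and the result pointer P are
   assumptions): a string-builtin folder can use this to reject
   characters that do not transfer cleanly from target to host before
   scanning a host-side copy of the string.  */
#if 0
  char c;
  if (target_char_cst_p (arg1, &c))
    /* C now holds the host representation of the target character.  */
    p = strchr (host_string, c);
#endif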
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
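/* For illustration: on a typical target where ptrdiff_t is 64 bits
   wide, the tree returned above wraps PTRDIFF_MAX = 2^63 - 1; object
   size checks elsewhere compare computed sizes against this bound.  */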